Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Position reporting interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
46 :
47 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
48 : {
49 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
50 2 : (lhs.stopPosition == rhs.stopPosition);
51 : }
52 : } // namespace
53 :
54 : namespace firebolt::rialto::server
55 : {
56 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
57 :
58 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
59 : {
60 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
61 :
62 3 : if (!factory)
63 : {
64 : try
65 : {
66 3 : factory = std::make_shared<GstGenericPlayerFactory>();
67 : }
68 0 : catch (const std::exception &e)
69 : {
70 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
71 : }
72 :
73 3 : GstGenericPlayerFactory::m_factory = factory;
74 : }
75 :
76 3 : return factory;
77 : }
78 :
79 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
80 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
81 : const VideoRequirements &videoRequirements,
82 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
83 : {
84 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
85 :
86 : try
87 : {
88 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
89 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
92 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
93 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
94 : {
95 0 : throw std::runtime_error("Cannot create GstWrapper");
96 : }
97 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
98 : {
99 0 : throw std::runtime_error("Cannot create GlibWrapper");
100 : }
101 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
102 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
103 : {
104 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
105 : }
106 : gstPlayer = std::make_unique<
107 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
108 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
109 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
110 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
111 : rdkGstreamerUtilsWrapper,
112 2 : IGstTextTrackSinkFactory::createFactory()),
113 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
114 3 : IGstProtectionMetadataHelperFactory::createFactory());
115 1 : }
116 0 : catch (const std::exception &e)
117 : {
118 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
119 : }
120 :
121 1 : return gstPlayer;
122 : }
123 :
124 212 : GstGenericPlayer::GstGenericPlayer(
125 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
126 : const VideoRequirements &videoRequirements,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
129 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
130 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
131 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
132 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
133 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
134 212 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
135 212 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
136 424 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
137 636 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
138 : {
139 212 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
140 :
141 212 : gstInitialiser.waitForInitialisation();
142 :
143 212 : m_context.decryptionService = &decryptionService;
144 :
145 212 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
146 : {
147 2 : throw std::runtime_error("Cannot create GstSrc");
148 : }
149 :
150 210 : if (!timerFactory)
151 : {
152 1 : throw std::runtime_error("TimerFactory is invalid");
153 : }
154 :
155 418 : if ((!gstProtectionMetadataFactory) ||
156 418 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
157 : {
158 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
159 : }
160 :
161 : // Ensure that rialtosrc has been initialised
162 209 : m_context.gstSrc->initSrc();
163 :
164 : // Start task thread
165 209 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
166 : {
167 0 : throw std::runtime_error("Failed to create the worker thread");
168 : }
169 :
170 : // Initialise pipeline
171 209 : switch (type)
172 : {
173 208 : case MediaType::MSE:
174 : {
175 208 : initMsePipeline();
176 208 : break;
177 : }
178 1 : default:
179 : {
180 1 : resetWorkerThread();
181 1 : throw std::runtime_error("Media type not supported");
182 : }
183 : }
184 :
185 : // Check the video requirements for a limited video.
186 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
187 : // video in a dual video scenario.
188 208 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
189 : {
190 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
191 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
192 8 : bool ermContextResult = setErmContext();
193 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
194 : {
195 1 : resetWorkerThread();
196 1 : termPipeline();
197 1 : throw std::runtime_error("Could not set secondary video");
198 : }
199 7 : }
200 : else
201 : {
202 200 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
203 : }
204 :
205 : m_gstDispatcherThread =
206 207 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
207 292 : }
208 :
209 414 : GstGenericPlayer::~GstGenericPlayer()
210 : {
211 207 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
212 207 : m_gstDispatcherThread.reset();
213 :
214 207 : resetWorkerThread();
215 :
216 207 : termPipeline();
217 414 : }
218 :
219 208 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 208 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 208 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 208 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 208 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 208 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 208 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 208 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 208 : if (playsink)
238 : {
239 207 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 207 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 208 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
247 : {
248 1 : GST_WARNING("Failed to set pipeline to READY state");
249 : }
250 208 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
251 : }
252 :
253 209 : void GstGenericPlayer::resetWorkerThread()
254 : {
255 209 : m_postponedFlushes.clear();
256 : // Shutdown task thread
257 209 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
258 209 : m_workerThread->join();
259 209 : m_workerThread.reset();
260 : }
261 :
262 208 : void GstGenericPlayer::termPipeline()
263 : {
264 208 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
265 : {
266 0 : m_finishSourceSetupTimer->cancel();
267 : }
268 :
269 208 : m_finishSourceSetupTimer.reset();
270 :
271 257 : for (auto &elem : m_context.streamInfo)
272 : {
273 49 : StreamInfo &streamInfo = elem.second;
274 51 : for (auto &buffer : streamInfo.buffers)
275 : {
276 2 : m_gstWrapper->gstBufferUnref(buffer);
277 : }
278 :
279 49 : streamInfo.buffers.clear();
280 : }
281 :
282 208 : m_taskFactory->createStop(m_context, *this)->execute();
283 208 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
284 208 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
285 208 : m_gstWrapper->gstObjectUnref(bus);
286 :
287 208 : if (m_context.source)
288 : {
289 1 : m_gstWrapper->gstObjectUnref(m_context.source);
290 : }
291 208 : if (m_context.subtitleSink)
292 : {
293 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
294 4 : m_context.subtitleSink = nullptr;
295 : }
296 :
297 208 : if (m_context.videoSink)
298 : {
299 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
300 0 : m_context.videoSink = nullptr;
301 : }
302 :
303 : // Delete the pipeline
304 208 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
305 :
306 208 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
307 : }
308 :
309 833 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
310 : {
311 : GFlagsClass *flagsClass =
312 833 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
313 833 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
314 833 : return flag ? flag->value : 0;
315 : }
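// A minimal usage sketch (not part of this file; it assumes the standard playbin flag nicks
// "audio", "video" and "text" are registered for the GstPlayFlags type): a caller such as
// setPlaybinFlags() would typically OR the looked-up values together and apply them to playbin:
//
//   unsigned flags = getGstPlayFlag("audio") | getGstPlayFlag("video") | getGstPlayFlag("text");
//   m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);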
316 :
317 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
318 : {
319 1 : self->m_gstWrapper->gstObjectRef(source);
320 1 : if (self->m_workerThread)
321 : {
322 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
323 : }
324 : }
325 :
326 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
327 : {
328 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
329 1 : self->m_gstWrapper->gstObjectRef(element);
330 1 : if (self->m_workerThread)
331 : {
332 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
333 : }
334 : }
335 :
336 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
337 : {
338 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
339 1 : if (self->m_workerThread)
340 : {
341 2 : self->m_workerThread->enqueueTask(
342 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
343 : }
344 1 : }
345 :
346 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
347 : {
348 1 : if (m_workerThread)
349 : {
350 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
351 : }
352 : }
353 :
354 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
355 : {
356 1 : if (m_workerThread)
357 : {
358 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
359 : }
360 : }
361 :
362 2 : void GstGenericPlayer::allSourcesAttached()
363 : {
364 2 : if (m_workerThread)
365 : {
366 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
367 : }
368 : }
369 :
370 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
371 : {
372 1 : if (m_workerThread)
373 : {
374 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
375 : }
376 : }
377 :
378 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
379 : {
380 1 : if (m_workerThread)
381 : {
382 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
383 : }
384 : }
385 :
386 1 : void GstGenericPlayer::setPosition(std::int64_t position)
387 : {
388 1 : if (m_workerThread)
389 : {
390 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
391 : }
392 : }
393 :
394 1 : void GstGenericPlayer::setPlaybackRate(double rate)
395 : {
396 1 : if (m_workerThread)
397 : {
398 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
399 : }
400 : }
401 :
402 5 : bool GstGenericPlayer::getPosition(std::int64_t &position)
403 : {
404 : // We are on the main thread here, but m_context.pipeline can be used safely, because it is modified only in the
405 : // GstGenericPlayer constructor and destructor, and GstGenericPlayer is created and destroyed on the main thread.
406 5 : position = getPosition(m_context.pipeline);
407 5 : if (position == -1)
408 : {
409 3 : RIALTO_SERVER_LOG_WARN("Query position failed");
410 3 : return false;
411 : }
412 :
413 2 : return true;
414 : }
415 :
416 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
417 : {
418 38 : const char *kSinkName{nullptr};
419 38 : GstElement *sink{nullptr};
420 38 : switch (mediaSourceType)
421 : {
422 18 : case MediaSourceType::AUDIO:
423 18 : kSinkName = "audio-sink";
424 18 : break;
425 18 : case MediaSourceType::VIDEO:
426 18 : kSinkName = "video-sink";
427 18 : break;
428 2 : default:
429 2 : break;
430 : }
431 38 : if (!kSinkName)
432 : {
433 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
434 : }
435 : else
436 : {
437 36 : if (m_context.pipeline == nullptr)
438 : {
439 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
440 : }
441 : else
442 : {
443 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
444 : }
445 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
446 36 : if (sink)
447 : {
448 25 : GstElement *autoSink{sink};
449 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
450 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
451 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
452 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
453 :
454 : // Is this an auto-sink?...
455 25 : if (autoSink != sink)
456 : {
457 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
458 :
459 : // increase the reference count of the auto sink
460 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
461 : }
462 : }
463 : else
464 : {
465 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
466 : }
467 : }
468 38 : return sink;
469 : }
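// Ownership note with a minimal caller sketch (hypothetical property name and value; the pattern
// mirrors the callers below): getSink() returns the sink with an added reference, so every
// successful call must be balanced with gstObjectUnref():
//
//   GstElement *sink{getSink(MediaSourceType::VIDEO)};
//   if (sink)
//   {
//       m_glibWrapper->gObjectSet(sink, "some-property", value, nullptr);
//       m_gstWrapper->gstObjectUnref(sink);
//   }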
470 :
471 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
472 : {
473 1 : m_flushWatcher->setFlushed(mediaSourceType);
474 : }
475 :
476 1 : void GstGenericPlayer::postponeFlush(const MediaSourceType &mediaSourceType, bool resetTime)
477 : {
478 1 : m_postponedFlushes.emplace_back(std::make_pair(mediaSourceType, resetTime));
479 : }
480 :
481 1 : void GstGenericPlayer::executePostponedFlushes()
482 : {
483 1 : if (m_workerThread)
484 : {
485 2 : for (const auto &[mediaSourceType, resetTime] : m_postponedFlushes)
486 : {
487 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
488 : }
489 : }
490 1 : m_postponedFlushes.clear();
491 : }
492 :
493 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
494 : {
495 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
496 19 : GValue item = G_VALUE_INIT;
497 19 : gboolean done = FALSE;
498 :
499 28 : while (!done)
500 : {
501 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
502 : {
503 12 : case GST_ITERATOR_OK:
504 : {
505 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
506 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
507 :
508 12 : if (factory)
509 : {
510 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
511 12 : if (mediaSourceType == MediaSourceType::AUDIO)
512 : {
513 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
514 : }
515 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
516 : {
517 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
518 : }
519 :
520 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
521 : {
522 12 : m_glibWrapper->gValueUnset(&item);
523 12 : m_gstWrapper->gstIteratorFree(it);
524 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
525 : }
526 : }
527 :
528 0 : m_glibWrapper->gValueUnset(&item);
529 0 : break;
530 : }
531 2 : case GST_ITERATOR_RESYNC:
532 2 : m_gstWrapper->gstIteratorResync(it);
533 2 : break;
534 7 : case GST_ITERATOR_ERROR:
535 : case GST_ITERATOR_DONE:
536 7 : done = TRUE;
537 7 : break;
538 : }
539 : }
540 :
541 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
542 :
543 7 : m_glibWrapper->gValueUnset(&item);
544 7 : m_gstWrapper->gstIteratorFree(it);
545 :
546 7 : return nullptr;
547 : }
548 :
549 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
550 : {
551 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
552 3 : GValue item = G_VALUE_INIT;
553 3 : gboolean done = FALSE;
554 :
555 4 : while (!done)
556 : {
557 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
558 : {
559 2 : case GST_ITERATOR_OK:
560 : {
561 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
562 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
563 :
564 2 : if (factory)
565 : {
566 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
567 2 : if (mediaSourceType == MediaSourceType::AUDIO)
568 : {
569 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
570 : }
571 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
572 : {
573 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
574 : }
575 :
576 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
577 : {
578 2 : m_glibWrapper->gValueUnset(&item);
579 2 : m_gstWrapper->gstIteratorFree(it);
580 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
581 : }
582 : }
583 :
584 0 : m_glibWrapper->gValueUnset(&item);
585 0 : break;
586 : }
587 0 : case GST_ITERATOR_RESYNC:
588 0 : m_gstWrapper->gstIteratorResync(it);
589 0 : break;
590 1 : case GST_ITERATOR_ERROR:
591 : case GST_ITERATOR_DONE:
592 1 : done = TRUE;
593 1 : break;
594 : }
595 : }
596 :
597 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
598 :
599 1 : m_glibWrapper->gValueUnset(&item);
600 1 : m_gstWrapper->gstIteratorFree(it);
601 :
602 1 : return nullptr;
603 : }
604 :
605 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
606 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
607 : {
608 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
609 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
610 5 : if (kSource)
611 : {
612 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
613 : audioAttributes =
614 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
615 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
616 : 0, // used only in one of the logs in rdk_gstreamer_utils; no
617 : // need to set this param.
618 : 0, // used only in one of the logs in rdk_gstreamer_utils; no
619 : // need to set this param.
620 4 : audioConfig.codecSpecificConfig.data(),
621 : static_cast<std::uint32_t>(
622 4 : audioConfig.codecSpecificConfig.size())};
623 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
624 : {
625 2 : audioAttributes->m_codecParam = "mp4a";
626 : }
627 2 : else if (source->getMimeType() == "audio/x-eac3")
628 : {
629 1 : audioAttributes->m_codecParam = "ec-3";
630 : }
631 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
632 : {
633 1 : audioAttributes->m_codecParam = "lpcm";
634 : }
635 4 : }
636 : else
637 : {
638 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
639 : }
640 :
641 5 : return audioAttributes;
642 : }
643 :
644 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
645 : {
646 1 : if (!m_workerThread)
647 0 : return false;
648 :
649 2 : m_workerThread->enqueueTask(
650 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
651 1 : return true;
652 : }
653 :
654 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
655 : {
656 5 : bool returnValue{false};
657 5 : GstElement *sink{getSink(mediaSourceType)};
658 5 : if (sink)
659 : {
660 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
661 : {
662 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
663 2 : returnValue = true;
664 : }
665 : else
666 : {
667 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
668 : }
669 3 : m_gstWrapper->gstObjectUnref(sink);
670 : }
671 : else
672 : {
673 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
674 : }
675 :
676 5 : return returnValue;
677 : }
678 :
679 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
680 : {
681 5 : bool returnValue{false};
682 5 : GstElement *sink{getSink(mediaSourceType)};
683 5 : if (sink)
684 : {
685 3 : GstStructure *stats{nullptr};
686 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
687 3 : if (!stats)
688 : {
689 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
690 : }
691 : else
692 : {
693 : guint64 renderedFramesTmp;
694 : guint64 droppedFramesTmp;
695 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
696 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
697 : {
698 1 : renderedFrames = renderedFramesTmp;
699 1 : droppedFrames = droppedFramesTmp;
700 1 : returnValue = true;
701 : }
702 : else
703 : {
704 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
705 : GST_ELEMENT_NAME(sink));
706 : }
707 2 : m_gstWrapper->gstStructureFree(stats);
708 : }
709 3 : m_gstWrapper->gstObjectUnref(sink);
710 : }
711 :
712 5 : return returnValue;
713 : }
714 :
715 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
716 : {
717 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
718 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
719 :
720 4 : if (mediaSegment.isEncrypted())
721 : {
722 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
723 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
724 :
725 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
726 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
727 3 : mediaSegment.getInitVector().size());
728 3 : GstBuffer *subsamples{nullptr};
729 3 : if (!mediaSegment.getSubSamples().empty())
730 : {
731 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
732 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
733 : GstByteWriter writer;
734 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
735 :
736 6 : for (const auto &subSample : mediaSegment.getSubSamples())
737 : {
738 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
739 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
740 : }
741 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
742 : }
743 :
744 3 : uint32_t crypt = 0;
745 3 : uint32_t skip = 0;
746 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
747 :
748 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
749 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
750 3 : mediaSegment.getInitWithLast15(),
751 : keyId,
752 : initVector,
753 : subsamples,
754 6 : mediaSegment.getCipherMode(),
755 : crypt,
756 : skip,
757 : encryptionPatternSet,
758 6 : m_context.decryptionService};
759 :
760 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
761 : {
762 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
763 1 : if (keyId)
764 : {
765 1 : m_gstWrapper->gstBufferUnref(keyId);
766 : }
767 1 : if (initVector)
768 : {
769 1 : m_gstWrapper->gstBufferUnref(initVector);
770 : }
771 1 : if (subsamples)
772 : {
773 1 : m_gstWrapper->gstBufferUnref(subsamples);
774 : }
775 : }
776 : }
777 :
778 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
779 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
780 4 : return gstBuffer;
781 : }
782 :
783 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
784 : {
785 4 : auto elem = m_context.streamInfo.find(mediaSource);
786 4 : if (elem != m_context.streamInfo.end())
787 : {
788 2 : StreamInfo &streamInfo = elem->second;
789 2 : streamInfo.isNeedDataPending = false;
790 :
791 : // Send new NeedMediaData if we still need it
792 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
793 : {
794 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
795 : }
796 : }
797 : else
798 : {
799 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
800 : }
801 4 : }
802 :
803 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
804 : {
805 19 : auto elem = m_context.streamInfo.find(mediaType);
806 19 : if (elem != m_context.streamInfo.end())
807 : {
808 16 : StreamInfo &streamInfo = elem->second;
809 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
810 : {
811 2 : return;
812 : }
813 :
814 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
815 : {
816 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
817 : }
818 : else
819 : {
820 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
821 : }
822 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
823 : {
824 : // This needs to be done before gstAppSrcPushBuffer() is
825 : // called, because pushing the buffer can free its memory
826 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
827 : }
828 :
829 28 : for (GstBuffer *buffer : streamInfo.buffers)
830 : {
831 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
832 : }
833 14 : streamInfo.buffers.clear();
834 14 : streamInfo.isDataPushed = true;
835 :
836 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
837 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
838 15 : [](const auto &entry) { return entry.second.isDataPushed; });
839 :
840 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
841 : {
842 1 : m_context.bufferedNotificationSent = true;
843 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
844 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
845 : }
846 14 : cancelUnderflow(mediaType);
847 :
848 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
849 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
850 : {
851 0 : setEos(mediaType);
852 : }
853 : }
854 : }
855 :
856 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
857 : {
858 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
859 7 : if (elem != m_context.streamInfo.end())
860 : {
861 6 : StreamInfo &streamInfo = elem->second;
862 :
863 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
864 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
865 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
866 :
867 6 : if (rate != kInvalidRate)
868 : {
869 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
870 : }
871 :
872 6 : if (channels != kInvalidChannels)
873 : {
874 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
875 : }
876 :
877 6 : setCodecData(newCaps, codecData);
878 :
879 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
880 : {
881 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
882 : }
883 :
884 6 : m_gstWrapper->gstCapsUnref(newCaps);
885 6 : m_gstWrapper->gstCapsUnref(currentCaps);
886 : }
887 7 : }
888 :
889 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
890 : const std::shared_ptr<CodecData> &codecData)
891 : {
892 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
893 8 : if (elem != m_context.streamInfo.end())
894 : {
895 7 : StreamInfo &streamInfo = elem->second;
896 :
897 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
898 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
899 :
900 7 : if (width > 0)
901 : {
902 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
903 : }
904 :
905 7 : if (height > 0)
906 : {
907 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
908 : }
909 :
910 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
911 : {
912 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
913 : frameRate.denominator, NULL);
914 : }
915 :
916 7 : setCodecData(newCaps, codecData);
917 :
918 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
919 : {
920 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
921 : }
922 :
923 7 : m_gstWrapper->gstCapsUnref(currentCaps);
924 7 : m_gstWrapper->gstCapsUnref(newCaps);
925 : }
926 8 : }
927 :
928 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
929 : {
930 5 : if (clippingStart || clippingEnd)
931 : {
932 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
933 : {
934 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
935 : clippingStart, clippingEnd);
936 : }
937 : else
938 : {
939 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
940 : buffer, clippingStart, clippingEnd);
941 : }
942 : }
943 5 : }
944 :
945 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
946 : {
947 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
948 : {
949 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
950 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
951 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
952 7 : m_gstWrapper->gstBufferUnref(buf);
953 7 : return true;
954 : }
955 6 : if (codecData && CodecDataType::STRING == codecData->type)
956 : {
957 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
958 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
959 2 : return true;
960 : }
961 4 : return false;
962 : }
963 :
964 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
965 : {
966 12 : auto initialPosition = m_context.initialPositions.find(source);
967 12 : if (m_context.initialPositions.end() == initialPosition)
968 : {
969 : // Sending initial sample not needed
970 7 : return;
971 : }
972 : // GstAppSrc does not replace the segment if it is the same as the previous one.
973 : // That causes problems with position reporting on Amlogic devices, so we need to push
974 : // two segments with different reset-time values.
975 5 : pushAdditionalSegmentIfRequired(source);
976 :
977 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
978 : {
979 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
980 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
981 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
982 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
983 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
984 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
985 : {
986 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
987 1 : m_gstWrapper->gstSegmentFree(segment);
988 1 : m_context.initialPositions.erase(initialPosition);
989 1 : return;
990 : }
991 5 : segment->applied_rate = appliedRate;
992 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
993 : "], rate: %f, appliedRate %f, reset_time: %d\n",
994 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
995 : segment->rate, segment->applied_rate, resetTime);
996 :
997 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
998 : // We can't pass the buffer inside the GstSample, because gst_app_src_push_sample
999 : // internally uses gst_buffer_copy, which loses the RialtoProtectionMeta (and that
1000 : // breaks EME decryption of the first frame).
1001 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
1002 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
1003 5 : m_gstWrapper->gstSampleUnref(sample);
1004 5 : m_gstWrapper->gstCapsUnref(currentCaps);
1005 :
1006 5 : m_gstWrapper->gstSegmentFree(segment);
1007 : }
1008 4 : m_context.currentPosition[source] = initialPosition->second.back();
1009 4 : m_context.initialPositions.erase(initialPosition);
1010 4 : return;
1011 : }
1012 :
1013 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
1014 : {
1015 5 : auto currentPosition = m_context.currentPosition.find(source);
1016 5 : if (m_context.currentPosition.end() == currentPosition)
1017 : {
1018 4 : return;
1019 : }
1020 1 : auto initialPosition = m_context.initialPositions.find(source);
1021 1 : if (m_context.initialPositions.end() == initialPosition)
1022 : {
1023 0 : return;
1024 : }
1025 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1026 1 : currentPosition->second == initialPosition->second.back())
1027 : {
1028 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1029 1 : SegmentData additionalSegment = initialPosition->second.back();
1030 1 : additionalSegment.resetTime = false;
1031 1 : initialPosition->second.push_back(additionalSegment);
1032 : }
1033 : }
1034 :
1035 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1036 : {
1037 2 : auto initialPosition = m_context.initialPositions.find(source);
1038 2 : if (m_context.initialPositions.end() == initialPosition)
1039 : {
1040 : // Sending initial sample not needed
1041 1 : return;
1042 : }
1043 :
1044 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1045 : GST_TIME_ARGS(initialPosition->second.back().position));
1046 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1047 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1048 :
1049 1 : m_context.initialPositions.erase(initialPosition);
1050 : }
1051 :
1052 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1053 : {
1054 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1055 : {
1056 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1057 1 : return false;
1058 : }
1059 6 : if (source->getMimeType().empty())
1060 : {
1061 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1062 1 : return false;
1063 : }
1064 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1065 5 : if (!audioAttributes)
1066 : {
1067 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1068 1 : return false;
1069 : }
1070 :
1071 4 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1072 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1073 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1074 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1075 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1076 : {
1077 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1078 3 : int sampleAttributes{
1079 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks that this param is not NULL.
1080 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1081 3 : unsigned int ui32Delay{0}; // output param
1082 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1083 : // currentDispPts in rdk_gstreamer_utils function stub
1084 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1085 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1086 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1087 3 : m_glibWrapper->gFree(oldCapsCStr);
1088 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1089 3 : bool svpEnabled{true}; // assume always true
1090 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1091 : bool result =
1092 3 : m_rdkGstreamerUtilsWrapper
1093 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1094 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1095 : &audioChangeStage,
1096 : &caps, // may fail for Amlogic - that implementation changes
1097 : // this parameter, which is probably used by Netflix later
1098 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1099 :
1100 3 : if (!result || !retVal)
1101 : {
1102 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1103 : }
1104 : }
1105 : else
1106 : {
1107 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1108 : }
1109 :
1110 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1111 4 : if (caps)
1112 4 : m_gstWrapper->gstCapsUnref(caps);
1113 4 : if (oldCaps)
1114 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1115 :
1116 4 : return true;
1117 5 : }
1118 :
1119 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1120 : {
1121 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1122 : }
1123 :
1124 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1125 : {
1126 88 : if (m_workerThread)
1127 : {
1128 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1129 : }
1130 : }
1131 :
1132 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1133 : {
1134 1 : if (m_workerThread)
1135 : {
1136 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1137 : }
1138 : }
1139 :
1140 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1141 : {
1142 3 : if (m_workerThread)
1143 : {
1144 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1145 6 : m_workerThread->enqueueTask(
1146 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1147 : }
1148 3 : }
1149 :
1150 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1151 : {
1152 2 : if (m_workerThread)
1153 : {
1154 2 : bool underflowEnabled = m_context.isPlaying;
1155 4 : m_workerThread->enqueueTask(
1156 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1157 : }
1158 2 : }
1159 :
1160 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1161 : {
1162 1 : allSourcesAttached();
1163 : }
1164 :
1165 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1166 : {
1167 14 : auto elem = m_context.streamInfo.find(mediaSource);
1168 14 : if (elem != m_context.streamInfo.end())
1169 : {
1170 14 : StreamInfo &streamInfo = elem->second;
1171 14 : if (!streamInfo.underflowOccured)
1172 : {
1173 11 : return;
1174 : }
1175 :
1176 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1177 3 : streamInfo.underflowOccured = false;
1178 : }
1179 : }
1180 :
1181 1 : void GstGenericPlayer::play()
1182 : {
1183 1 : if (m_workerThread)
1184 : {
1185 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1186 : }
1187 : }
1188 :
1189 1 : void GstGenericPlayer::pause()
1190 : {
1191 1 : if (m_workerThread)
1192 : {
1193 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1194 : }
1195 : }
1196 :
1197 1 : void GstGenericPlayer::stop()
1198 : {
1199 1 : if (m_workerThread)
1200 : {
1201 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1202 : }
1203 : }
1204 :
1205 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1206 : {
1207 4 : if (!m_context.pipeline)
1208 : {
1209 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1210 1 : if (m_gstPlayerClient)
1211 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1212 1 : return false;
1213 : }
1214 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1215 : {
1216 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1217 1 : if (m_gstPlayerClient)
1218 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1219 1 : return false;
1220 : }
1221 2 : return true;
1222 : }
1223 :
1224 9 : int64_t GstGenericPlayer::getPosition(GstElement *element)
1225 : {
1226 9 : if (!element)
1227 : {
1228 1 : RIALTO_SERVER_LOG_WARN("Element is null");
1229 1 : return -1;
1230 : }
1231 :
1232 8 : m_gstWrapper->gstStateLock(element);
1233 :
1234 16 : if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
1235 8 : (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
1236 1 : m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
1237 : {
1238 1 : RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
1239 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
1240 : m_gstWrapper->gstElementStateChangeReturnGetName(
1241 : m_gstWrapper->gstElementGetStateReturn(element)),
1242 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));
1243 :
1244 1 : m_gstWrapper->gstStateUnlock(element);
1245 1 : return -1;
1246 : }
1247 7 : m_gstWrapper->gstStateUnlock(element);
1248 :
1249 7 : gint64 position = -1;
1250 7 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
1251 : {
1252 1 : RIALTO_SERVER_LOG_WARN("Failed to query position");
1253 1 : return -1;
1254 : }
1255 :
1256 6 : return position;
1257 : }
1258 :
1259 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1260 : {
1261 1 : if (m_workerThread)
1262 : {
1263 2 : m_workerThread->enqueueTask(
1264 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1265 : }
1266 1 : }
1267 :
1268 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1269 : {
1270 1 : if (m_workerThread)
1271 : {
1272 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1273 : }
1274 : }
1275 :
1276 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1277 : {
1278 4 : bool result = false;
1279 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1280 4 : if (videoSink)
1281 : {
1282 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1283 : {
1284 : std::string rect =
1285 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1286 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1287 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1288 2 : m_context.pendingGeometry.clear();
1289 2 : result = true;
1290 : }
1291 : else
1292 : {
1293 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1294 : }
1295 3 : m_gstWrapper->gstObjectUnref(videoSink);
1296 : }
1297 :
1298 4 : return result;
1299 : }
1300 :
1301 3 : bool GstGenericPlayer::setImmediateOutput()
1302 : {
1303 3 : bool result{false};
1304 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1305 : {
1306 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1307 3 : if (sink)
1308 : {
1309 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1310 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1311 :
1312 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1313 : {
1314 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1315 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1316 1 : result = true;
1317 : }
1318 : else
1319 : {
1320 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1321 : }
1322 2 : m_context.pendingImmediateOutputForVideo.reset();
1323 2 : m_gstWrapper->gstObjectUnref(sink);
1324 : }
1325 : else
1326 : {
1327 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1328 : }
1329 : }
1330 3 : return result;
1331 : }
1332 :
1333 4 : bool GstGenericPlayer::setShowVideoWindow()
1334 : {
1335 4 : if (!m_context.pendingShowVideoWindow.has_value())
1336 : {
1337 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1338 1 : return false;
1339 : }
1340 :
1341 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1342 3 : if (!videoSink)
1343 : {
1344 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1345 1 : return false;
1346 : }
1347 2 : bool result{false};
1348 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1349 : {
1350 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1351 1 : result = true;
1352 : }
1353 : else
1354 : {
1355 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1356 : }
1357 2 : m_context.pendingShowVideoWindow.reset();
1358 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1359 2 : return result;
1360 : }
1361 :
1362 4 : bool GstGenericPlayer::setLowLatency()
1363 : {
1364 4 : bool result{false};
1365 4 : if (m_context.pendingLowLatency.has_value())
1366 : {
1367 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1368 4 : if (sink)
1369 : {
1370 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1371 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1372 :
1373 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1374 : {
1375 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1376 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1377 2 : result = true;
1378 : }
1379 : else
1380 : {
1381 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1382 : }
1383 3 : m_context.pendingLowLatency.reset();
1384 3 : m_gstWrapper->gstObjectUnref(sink);
1385 : }
1386 : else
1387 : {
1388 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1389 : }
1390 : }
1391 4 : return result;
1392 : }
1393 :
1394 3 : bool GstGenericPlayer::setSync()
1395 : {
1396 3 : bool result{false};
1397 3 : if (m_context.pendingSync.has_value())
1398 : {
1399 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1400 3 : if (sink)
1401 : {
1402 2 : bool sync{m_context.pendingSync.value()};
1403 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1404 :
1405 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1406 : {
1407 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1408 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1409 1 : result = true;
1410 : }
1411 : else
1412 : {
1413 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1414 : }
1415 2 : m_context.pendingSync.reset();
1416 2 : m_gstWrapper->gstObjectUnref(sink);
1417 : }
1418 : else
1419 : {
1420 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1421 : }
1422 : }
1423 3 : return result;
1424 : }
1425 :
1426 3 : bool GstGenericPlayer::setSyncOff()
1427 : {
1428 3 : bool result{false};
1429 3 : if (m_context.pendingSyncOff.has_value())
1430 : {
1431 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1432 3 : if (decoder)
1433 : {
1434 2 : bool syncOff{m_context.pendingSyncOff.value()};
1435 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1436 :
1437 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1438 : {
1439 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1440 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1441 1 : result = true;
1442 : }
1443 : else
1444 : {
1445 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1446 : }
1447 2 : m_context.pendingSyncOff.reset();
1448 2 : m_gstWrapper->gstObjectUnref(decoder);
1449 : }
1450 : else
1451 : {
1452 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1453 : }
1454 : }
1455 3 : return result;
1456 : }
1457 :
1458 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1459 : {
1460 6 : bool result{false};
1461 6 : int32_t streamSyncMode{0};
1462 : {
1463 6 : std::unique_lock lock{m_context.propertyMutex};
1464 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1465 : {
1466 0 : return false;
1467 : }
1468 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1469 : }
1470 6 : if (MediaSourceType::AUDIO == type)
1471 : {
1472 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1473 3 : if (!decoder)
1474 : {
1475 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1476 1 : return false;
1477 : }
1478 :
1479 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1480 :
1481 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1482 : {
1483 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1484 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1485 1 : result = true;
1486 : }
1487 : else
1488 : {
1489 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1490 : }
1491 2 : m_gstWrapper->gstObjectUnref(decoder);
1492 2 : std::unique_lock lock{m_context.propertyMutex};
1493 2 : m_context.pendingStreamSyncMode.erase(type);
1494 : }
1495 3 : else if (MediaSourceType::VIDEO == type)
1496 : {
1497 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1498 3 : if (!parser)
1499 : {
1500 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1501 1 : return false;
1502 : }
1503 :
1504 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1505 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1506 :
1507 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1508 : {
1509 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1510 1 : result = true;
1511 : }
1512 : else
1513 : {
1514 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1515 : }
1516 2 : m_gstWrapper->gstObjectUnref(parser);
1517 2 : std::unique_lock lock{m_context.propertyMutex};
1518 2 : m_context.pendingStreamSyncMode.erase(type);
1519 : }
1520 4 : return result;
1521 : }
1522 :
1523 3 : bool GstGenericPlayer::setRenderFrame()
1524 : {
1525 3 : bool result{false};
1526 3 : if (m_context.pendingRenderFrame)
1527 : {
1528 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1529 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1530 3 : if (sink)
1531 : {
1532 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1533 : {
1534 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1535 :
1536 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1537 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1538 : false));
1539 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1540 1 : result = true;
1541 : }
1542 : else
1543 : {
1544 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1545 : }
1546 2 : m_gstWrapper->gstObjectUnref(sink);
1547 2 : m_context.pendingRenderFrame = false;
1548 : }
1549 : else
1550 : {
1551 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1552 : }
1553 : }
1554 3 : return result;
1555 : }
1556 :
1557 3 : bool GstGenericPlayer::setBufferingLimit()
1558 : {
1559 3 : bool result{false};
1560 3 : guint bufferingLimit{0};
1561 : {
1562 3 : std::unique_lock lock{m_context.propertyMutex};
1563 3 : if (!m_context.pendingBufferingLimit.has_value())
1564 : {
1565 0 : return false;
1566 : }
1567 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1568 : }
1569 :
1570 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1571 3 : if (decoder)
1572 : {
1573 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1574 :
1575 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1576 : {
1577 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1578 1 : result = true;
1579 : }
1580 : else
1581 : {
1582 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1583 : GST_ELEMENT_NAME(decoder));
1584 : }
1585 2 : m_gstWrapper->gstObjectUnref(decoder);
1586 2 : std::unique_lock lock{m_context.propertyMutex};
1587 2 : m_context.pendingBufferingLimit.reset();
1588 : }
1589 : else
1590 : {
1591 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1592 : }
1593 3 : return result;
1594 : }
1595 :
1596 2 : bool GstGenericPlayer::setUseBuffering()
1597 : {
1598 2 : std::unique_lock lock{m_context.propertyMutex};
1599 2 : if (m_context.pendingUseBuffering.has_value())
1600 : {
1601 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1602 : {
1603 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1604 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1605 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1606 : useBufferingGboolean, nullptr);
1607 1 : m_context.pendingUseBuffering.reset();
1608 1 : return true;
1609 : }
1610 : else
1611 : {
1612 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1613 : }
1614 : }
1615 1 : return false;
1616 2 : }
1617 :
1618 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1619 : {
1620 8 : bool result = false;
1621 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1622 8 : if (factory)
1623 : {
1624 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1625 7 : if (videoSink)
1626 : {
1627 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1628 : {
1629 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1630 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1631 4 : result = true;
1632 : }
1633 : else
1634 : {
1635 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1636 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1637 : }
1638 : }
1639 : else
1640 : {
1641 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1642 : }
1643 :
1644 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1645 : }
1646 : else
1647 : {
1648 : // No westeros sink
1649 1 : result = true;
1650 : }
1651 :
1652 8 : return result;
1653 : }
1654 :
1655 8 : bool GstGenericPlayer::setErmContext()
1656 : {
1657 8 : bool result = false;
1658 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1659 8 : if (context)
1660 : {
1661 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1662 6 : if (contextStructure)
1663 : {
1664 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1665 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1666 5 : result = true;
1667 : }
1668 : else
1669 : {
1670 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1671 : }
1672 6 : m_gstWrapper->gstContextUnref(context);
1673 : }
1674 : else
1675 : {
1676 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1677 : }
1678 :
1679 8 : return result;
1680 : }
1681 :
1682 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1683 : {
1684 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1685 : {
1686 1 : return;
1687 : }
1688 :
1689 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1690 : kPositionReportTimerMs,
1691 10 : [this]()
1692 : {
1693 1 : if (m_workerThread)
1694 : {
1695 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
1696 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1697 : }
1698 1 : },
1699 5 : firebolt::rialto::common::TimerType::PERIODIC);
1700 : }
1701 :
1702 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1703 : {
1704 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1705 : {
1706 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1707 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1708 : }
1709 4 : }
1710 :
1711 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
1712 : {
1713 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1714 : {
1715 0 : return;
1716 : }
1717 :
1718 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
1719 : kSubtitleClockResyncInterval,
1720 0 : [this]()
1721 : {
1722 0 : if (m_workerThread)
1723 : {
1724 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
1725 : }
1726 0 : },
1727 0 : firebolt::rialto::common::TimerType::PERIODIC);
1728 : }
1729 :
1730 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
1731 : {
1732 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1733 : {
1734 0 : m_subtitleClockResyncTimer->cancel();
1735 0 : m_subtitleClockResyncTimer.reset();
1736 : }
1737 : }
1738 :
1739 2 : void GstGenericPlayer::stopWorkerThread()
1740 : {
1741 2 : if (m_workerThread)
1742 : {
1743 2 : m_workerThread->stop();
1744 : }
1745 : }
1746 :
1747 0 : void GstGenericPlayer::setPendingPlaybackRate()
1748 : {
1749 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1750 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1751 : }
1752 :
1753 1 : void GstGenericPlayer::renderFrame()
1754 : {
1755 1 : if (m_workerThread)
1756 : {
1757 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1758 : }
1759 : }
1760 :
1761 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1762 : {
1763 18 : if (m_workerThread)
1764 : {
1765 36 : m_workerThread->enqueueTask(
1766 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1767 : }
1768 18 : }
1769 :
1770 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1771 : {
1772 : // We are on the main thread here, but m_context.pipeline can be used because it is modified only in the GstGenericPlayer
1773 : // constructor and destructor. GstGenericPlayer is created and destroyed on the main thread, so this access is safe.
1774 3 : if (!m_context.pipeline)
1775 : {
1776 0 : return false;
1777 : }
1778 :
1779 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1780 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1781 : // The code has been written to be backwardly compatible on platforms that don't have this property.
1782 : // The code has been written to be backward compatible with platforms that don't have this property.
1783 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1784 : // to find volume in the pipeline works and is used.
1785 : // - if the returned fade volume is positive then audio-fade is active. In this case the returned fade volume
1786 : // directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
1787 : // meaningless and doesn't contribute in this case).
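 : // For example (illustrative values only, following the observed behaviour described above):
 : //   fade-volume == 75   -> audio fade is active;   currentVolume = 75 / 100.0 = 0.75 (pipeline volume is ignored)
 : //   fade-volume == -100 -> audio fade is inactive; currentVolume is read from the pipeline via
 : //                          gstStreamVolumeGetVolume(..., GST_STREAM_VOLUME_FORMAT_LINEAR)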
1788 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1789 5 : if (m_context.audioFadeEnabled && sink &&
1790 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1791 : {
1792 2 : gint fadeVolume{-100};
1793 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1794 2 : if (fadeVolume < 0)
1795 : {
1796 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1797 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1798 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1799 : }
1800 : else
1801 : {
1802 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1803 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1804 : }
1805 : }
1806 : else
1807 : {
1808 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1809 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1810 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1811 : }
1812 :
1813 3 : if (sink)
1814 2 : m_gstWrapper->gstObjectUnref(sink);
1815 :
1816 3 : return true;
1817 : }
1818 :
1819 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1820 : {
1821 1 : if (m_workerThread)
1822 : {
1823 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1824 : }
1825 : }
1826 :
1827 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1828 : {
1829 : // We are on the main thread here, but m_context.pipeline can be used because it is modified only in the GstGenericPlayer
1830 : // constructor and destructor. GstGenericPlayer is created and destroyed on the main thread, so this access is safe.
1831 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1832 : {
1833 2 : if (!m_context.subtitleSink)
1834 : {
1835 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1836 1 : return false;
1837 : }
1838 1 : gboolean muteValue{FALSE};
1839 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1840 1 : mute = muteValue;
1841 : }
1842 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1843 : {
1844 2 : if (!m_context.pipeline)
1845 : {
1846 1 : return false;
1847 : }
1848 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1849 : }
1850 : else
1851 : {
1852 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1853 1 : return false;
1854 : }
1855 :
1856 2 : return true;
1857 : }
1858 :
1859 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1860 : {
1861 1 : GstElement *sink = getSink(mediaSourceType);
1862 1 : if (!sink)
1863 : {
1864 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1865 0 : return true; // Our sinks are async by default
1866 : }
1867 1 : gboolean returnValue{TRUE};
1868 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1869 1 : m_gstWrapper->gstObjectUnref(sink);
1870 1 : return returnValue == TRUE;
1871 : }
1872 :
1873 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1874 : {
1875 1 : if (m_workerThread)
1876 : {
1877 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1878 : }
1879 : }
1880 :
1881 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1882 : {
1883 3 : if (!m_context.subtitleSink)
1884 : {
1885 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1886 1 : return false;
1887 : }
1888 :
1889 2 : gchar *identifier = nullptr;
1890 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1891 :
1892 2 : if (identifier)
1893 : {
1894 1 : textTrackIdentifier = identifier;
1895 1 : m_glibWrapper->gFree(identifier);
1896 1 : return true;
1897 : }
1898 : else
1899 : {
1900 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1901 1 : return false;
1902 : }
1903 : }
1904 :
1905 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1906 : {
1907 1 : if (m_workerThread)
1908 : {
1909 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1910 : }
1911 1 : return true;
1912 : }
1913 :
1914 1 : bool GstGenericPlayer::setSync(bool sync)
1915 : {
1916 1 : if (m_workerThread)
1917 : {
1918 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1919 : }
1920 1 : return true;
1921 : }
1922 :
1923 4 : bool GstGenericPlayer::getSync(bool &sync)
1924 : {
1925 4 : bool returnValue{false};
1926 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1927 4 : if (sink)
1928 : {
1929 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1930 : {
1931 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1932 1 : returnValue = true;
1933 : }
1934 : else
1935 : {
1936 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1937 : }
1938 2 : m_gstWrapper->gstObjectUnref(sink);
1939 : }
1940 2 : else if (m_context.pendingSync.has_value())
1941 : {
1942 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1943 1 : sync = m_context.pendingSync.value();
1944 1 : returnValue = true;
1945 : }
1946 : else
1947 : {
1948 : // We don't know the default value of the sink's sync property, so return failure here
1949 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1950 : }
1951 :
1952 4 : return returnValue;
1953 : }
1954 :
1955 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1956 : {
1957 1 : if (m_workerThread)
1958 : {
1959 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1960 : }
1961 1 : return true;
1962 : }
1963 :
1964 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1965 : {
1966 1 : if (m_workerThread)
1967 : {
1968 2 : m_workerThread->enqueueTask(
1969 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1970 : }
1971 1 : return true;
1972 : }
1973 :
1974 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1975 : {
1976 5 : bool returnValue{false};
1977 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1978 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1979 : {
1980 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1981 2 : returnValue = true;
1982 : }
1983 : else
1984 : {
1985 3 : std::unique_lock lock{m_context.propertyMutex};
1986 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1987 : {
1988 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1989 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1990 1 : returnValue = true;
1991 : }
1992 : else
1993 : {
1994 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1995 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1996 : }
1997 3 : }
1998 :
1999 5 : if (decoder)
2000 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2001 :
2002 5 : return returnValue;
2003 : }
2004 :
2005 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2006 : {
2007 1 : if (m_workerThread)
2008 : {
2009 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2010 : }
2011 : }
2012 :
2013 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2014 : {
2015 1 : if (m_workerThread)
2016 : {
2017 1 : async = isAsync(mediaSourceType);
2018 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
2019 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
2020 : }
2021 : }
2022 :
2023 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2024 : double appliedRate, uint64_t stopPosition)
2025 : {
2026 1 : if (m_workerThread)
2027 : {
2028 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2029 : resetTime, appliedRate, stopPosition));
2030 : }
2031 : }
2032 :
2033 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2034 : {
2035 0 : if (m_workerThread)
2036 : {
2037 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2038 : }
2039 : }
2040 :
2041 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2042 : {
2043 1 : if (m_workerThread)
2044 : {
2045 2 : m_workerThread->enqueueTask(
2046 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2047 : }
2048 1 : }
2049 :
2050 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2051 : {
2052 1 : if (m_workerThread)
2053 : {
2054 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2055 : }
2056 : }
2057 :
2058 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2059 : {
2060 5 : bool returnValue{false};
2061 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2062 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2063 : {
2064 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2065 2 : returnValue = true;
2066 : }
2067 : else
2068 : {
2069 3 : std::unique_lock lock{m_context.propertyMutex};
2070 3 : if (m_context.pendingBufferingLimit.has_value())
2071 : {
2072 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2073 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2074 1 : returnValue = true;
2075 : }
2076 : else
2077 : {
2078 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2079 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2080 : }
2081 3 : }
2082 :
2083 5 : if (decoder)
2084 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2085 :
2086 5 : return returnValue;
2087 : }
2088 :
2089 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2090 : {
2091 1 : if (m_workerThread)
2092 : {
2093 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2094 : }
2095 : }
2096 :
2097 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2098 : {
2099 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2100 : {
2101 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2102 1 : return true;
2103 : }
2104 : else
2105 : {
2106 2 : std::unique_lock lock{m_context.propertyMutex};
2107 2 : if (m_context.pendingUseBuffering.has_value())
2108 : {
2109 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2110 1 : useBuffering = m_context.pendingUseBuffering.value();
2111 1 : return true;
2112 : }
2113 2 : }
2114 1 : return false;
2115 : }
2116 :
2117 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2118 : {
2119 1 : if (m_workerThread)
2120 : {
2121 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2122 : }
2123 : }
2124 :
2125 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2126 : {
2127 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2128 : }
2129 :
2130 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2131 : {
2132 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2133 : }
2134 :
2135 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2136 : {
2137 : // Only add children that are sinks
2138 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2139 : {
2140 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2141 :
2142 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2143 : {
2144 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2145 : }
2146 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2147 : }
2148 3 : }
2149 :
2150 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2151 : {
2152 : // Only add children that are sinks
2153 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2154 : {
2155 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2156 :
2157 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2158 : {
2159 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2160 : }
2161 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2162 : }
2163 3 : }
2164 :
2165 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2166 : {
2167 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2168 : {
2169 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2170 :
2171 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2172 : {
2173 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2174 1 : return;
2175 : }
2176 :
2177 2 : m_context.autoVideoChildSink = nullptr;
2178 : }
2179 : }
2180 :
2181 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2182 : {
2183 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2184 : {
2185 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2186 :
2187 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2188 : {
2189 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2190 1 : return;
2191 : }
2192 :
2193 2 : m_context.autoAudioChildSink = nullptr;
2194 : }
2195 : }
2196 :
2197 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2198 : {
2199 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2200 14 : if (!kTmpName)
2201 0 : return sink;
2202 :
2203 28 : const std::string kElementTypeName{kTmpName};
2204 14 : if (kElementTypeName == "GstAutoVideoSink")
2205 : {
2206 1 : if (!m_context.autoVideoChildSink)
2207 : {
2208 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2209 : }
2210 : else
2211 : {
2212 1 : return m_context.autoVideoChildSink;
2213 : }
2214 : }
2215 13 : return sink;
2216 14 : }
2217 :
2218 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2219 : {
2220 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2221 11 : if (!kTmpName)
2222 0 : return sink;
2223 :
2224 22 : const std::string kElementTypeName{kTmpName};
2225 11 : if (kElementTypeName == "GstAutoAudioSink")
2226 : {
2227 1 : if (!m_context.autoAudioChildSink)
2228 : {
2229 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2230 : }
2231 : else
2232 : {
2233 1 : return m_context.autoAudioChildSink;
2234 : }
2235 : }
2236 10 : return sink;
2237 11 : }
2238 :
2239 208 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2240 : {
2241 208 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2242 :
2243 208 : if (enableAudio)
2244 : {
2245 208 : flags |= getGstPlayFlag("audio");
2246 208 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2247 : }
2248 :
2249 208 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2250 : }
2251 :
2252 208 : bool GstGenericPlayer::shouldEnableNativeAudio()
2253 : {
2254 208 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2255 208 : if (factory)
2256 : {
2257 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2258 1 : return true;
2259 : }
2260 207 : return false;
2261 : }
2262 :
2263 : }; // namespace firebolt::rialto::server
|