Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Report position interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 :
46 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
47 : {
48 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
49 2 : (lhs.stopPosition == rhs.stopPosition);
50 : }
51 : } // namespace
52 :
53 : namespace firebolt::rialto::server
54 : {
55 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
56 :
57 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
58 : {
59 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
60 :
61 3 : if (!factory)
62 : {
63 : try
64 : {
65 3 : factory = std::make_shared<GstGenericPlayerFactory>();
66 : }
67 0 : catch (const std::exception &e)
68 : {
69 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
70 : }
71 :
72 3 : GstGenericPlayerFactory::m_factory = factory;
73 : }
74 :
75 3 : return factory;
76 : }
77 :
78 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
79 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
80 : const VideoRequirements &videoRequirements,
81 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
82 : {
83 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
84 :
85 : try
86 : {
87 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
88 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
89 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
92 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
93 : {
94 0 : throw std::runtime_error("Cannot create GstWrapper");
95 : }
96 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
97 : {
98 0 : throw std::runtime_error("Cannot create GlibWrapper");
99 : }
100 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
101 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
102 : {
103 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
104 : }
105 : gstPlayer = std::make_unique<
106 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
107 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
108 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
109 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
110 : rdkGstreamerUtilsWrapper,
111 2 : IGstTextTrackSinkFactory::createFactory()),
112 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
113 3 : IGstProtectionMetadataHelperFactory::createFactory());
114 1 : }
115 0 : catch (const std::exception &e)
116 : {
117 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
118 : }
119 :
120 1 : return gstPlayer;
121 : }
122 :
123 213 : GstGenericPlayer::GstGenericPlayer(
124 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
125 : const VideoRequirements &videoRequirements,
126 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
129 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
130 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
131 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
132 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
133 213 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
134 213 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
135 426 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
136 639 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
137 : {
138 213 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
139 :
140 213 : gstInitialiser.waitForInitialisation();
141 :
142 213 : m_context.decryptionService = &decryptionService;
143 :
144 213 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
145 : {
146 2 : throw std::runtime_error("Cannot create GstSrc");
147 : }
148 :
149 211 : if (!timerFactory)
150 : {
151 1 : throw std::runtime_error("TimerFactory is invalid");
152 : }
153 :
154 420 : if ((!gstProtectionMetadataFactory) ||
155 420 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
156 : {
157 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
158 : }
159 :
160 : // Ensure that rialtosrc has been initialised
161 210 : m_context.gstSrc->initSrc();
162 :
163 : // Start task thread
164 210 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
165 : {
166 0 : throw std::runtime_error("Failed to create the worker thread");
167 : }
168 :
169 : // Initialise pipeline
170 210 : switch (type)
171 : {
172 209 : case MediaType::MSE:
173 : {
174 209 : initMsePipeline();
175 209 : break;
176 : }
177 1 : default:
178 : {
179 1 : resetWorkerThread();
180 1 : throw std::runtime_error("Media type not supported");
181 : }
182 : }
183 :
184 : // Check the video requirements for a limited video.
185 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
186 : // video in a dual video scenario.
187 209 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
188 : {
189 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
190 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
191 8 : bool ermContextResult = setErmContext();
192 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
193 : {
194 1 : resetWorkerThread();
195 1 : termPipeline();
196 1 : throw std::runtime_error("Could not set secondary video");
197 : }
198 7 : }
199 : else
200 : {
201 201 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
202 : }
203 :
204 : m_gstDispatcherThread =
205 208 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
206 283 : }
207 :
208 416 : GstGenericPlayer::~GstGenericPlayer()
209 : {
210 208 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
211 :
212 208 : m_gstDispatcherThread.reset();
213 :
214 208 : resetWorkerThread();
215 :
216 208 : termPipeline();
217 416 : }
218 :
219 209 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 209 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 209 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 209 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 209 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 209 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 209 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 209 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 209 : if (playsink)
238 : {
239 208 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 208 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 209 : }
247 :
248 210 : void GstGenericPlayer::resetWorkerThread()
249 : {
250 : // Shutdown task thread
251 210 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
252 210 : m_workerThread->join();
253 210 : m_workerThread.reset();
254 : }
255 :
256 209 : void GstGenericPlayer::termPipeline()
257 : {
258 209 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
259 : {
260 0 : m_finishSourceSetupTimer->cancel();
261 : }
262 :
263 209 : m_finishSourceSetupTimer.reset();
264 :
265 258 : for (auto &elem : m_context.streamInfo)
266 : {
267 49 : StreamInfo &streamInfo = elem.second;
268 51 : for (auto &buffer : streamInfo.buffers)
269 : {
270 2 : m_gstWrapper->gstBufferUnref(buffer);
271 : }
272 :
273 49 : streamInfo.buffers.clear();
274 : }
275 :
276 209 : m_taskFactory->createStop(m_context, *this)->execute();
277 209 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
278 209 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
279 209 : m_gstWrapper->gstObjectUnref(bus);
280 :
281 209 : if (m_context.source)
282 : {
283 1 : m_gstWrapper->gstObjectUnref(m_context.source);
284 : }
285 209 : if (m_context.subtitleSink)
286 : {
287 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
288 : }
289 :
290 : // Delete the pipeline
291 209 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
292 : }
293 :
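 : // Looks up the value of a GstPlayFlags flag by its nick (e.g. "audio", "video", "text"); returns 0 if the flag is not found.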
294 837 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
295 : {
296 : GFlagsClass *flagsClass =
297 837 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
298 837 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
299 837 : return flag ? flag->value : 0;
300 : }
301 :
302 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
303 : {
304 1 : self->m_gstWrapper->gstObjectRef(source);
305 1 : if (self->m_workerThread)
306 : {
307 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
308 : }
309 : }
310 :
311 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
312 : {
313 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
314 1 : self->m_gstWrapper->gstObjectRef(element);
315 1 : if (self->m_workerThread)
316 : {
317 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
318 : }
319 : }
320 :
321 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
322 : {
323 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
324 1 : if (self->m_workerThread)
325 : {
326 2 : self->m_workerThread->enqueueTask(
327 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
328 : }
329 1 : }
330 :
331 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
332 : {
333 1 : if (m_workerThread)
334 : {
335 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
336 : }
337 : }
338 :
339 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
340 : {
341 1 : if (m_workerThread)
342 : {
343 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
344 : }
345 : }
346 :
347 2 : void GstGenericPlayer::allSourcesAttached()
348 : {
349 2 : if (m_workerThread)
350 : {
351 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
352 : }
353 : }
354 :
355 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
356 : {
357 1 : if (m_workerThread)
358 : {
359 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
360 : }
361 : }
362 :
363 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
364 : {
365 1 : if (m_workerThread)
366 : {
367 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
368 : }
369 : }
370 :
371 1 : void GstGenericPlayer::setPosition(std::int64_t position)
372 : {
373 1 : if (m_workerThread)
374 : {
375 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
376 : }
377 : }
378 :
379 1 : void GstGenericPlayer::setPlaybackRate(double rate)
380 : {
381 1 : if (m_workerThread)
382 : {
383 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
384 : }
385 : }
386 :
387 4 : bool GstGenericPlayer::getPosition(std::int64_t &position)
388 : {
389 : // We are on the main thread here, but m_context.pipeline can be used because it is modified only in the GstGenericPlayer
390 : // constructor and destructor. GstGenericPlayer is created/destructed on the main thread, so this will not crash.
391 4 : if (!m_context.pipeline || GST_STATE(m_context.pipeline) < GST_STATE_PAUSED)
392 : {
393 1 : RIALTO_SERVER_LOG_WARN("GetPosition failed. Pipeline is null or state < PAUSED");
394 1 : return false;
395 : }
396 3 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
397 : {
398 1 : return false;
399 : }
400 2 : return true;
401 : }
402 :
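 : // Fetches the audio-sink or video-sink from playbin; for autoaudiosink/autovideosink the actual child sink is returned instead.
 : // The returned element holds an extra reference, which the caller must unref.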
403 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
404 : {
405 38 : const char *kSinkName{nullptr};
406 38 : GstElement *sink{nullptr};
407 38 : switch (mediaSourceType)
408 : {
409 18 : case MediaSourceType::AUDIO:
410 18 : kSinkName = "audio-sink";
411 18 : break;
412 18 : case MediaSourceType::VIDEO:
413 18 : kSinkName = "video-sink";
414 18 : break;
415 2 : default:
416 2 : break;
417 : }
418 38 : if (!kSinkName)
419 : {
420 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
421 : }
422 : else
423 : {
424 36 : if (m_context.pipeline == nullptr)
425 : {
426 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
427 : }
428 : else
429 : {
430 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
431 : }
432 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
433 36 : if (sink)
434 : {
435 25 : GstElement *autoSink{sink};
436 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
437 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
438 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
439 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
440 :
441 : // Is this an auto-sink?...
442 25 : if (autoSink != sink)
443 : {
444 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
445 :
446 : // increase the reference count of the auto sink
447 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
448 : }
449 : }
450 : else
451 : {
452 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
453 : }
454 : }
455 38 : return sink;
456 : }
457 :
458 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
459 : {
460 1 : m_flushWatcher->setFlushed(mediaSourceType);
461 : }
462 :
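 : // Recursively iterates the pipeline bin and returns the first decoder element matching the given media type
 : // (with an added reference), or nullptr if none is found.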
463 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
464 : {
465 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
466 19 : GValue item = G_VALUE_INIT;
467 19 : gboolean done = FALSE;
468 :
469 28 : while (!done)
470 : {
471 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
472 : {
473 12 : case GST_ITERATOR_OK:
474 : {
475 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
476 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
477 :
478 12 : if (factory)
479 : {
480 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
481 12 : if (mediaSourceType == MediaSourceType::AUDIO)
482 : {
483 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
484 : }
485 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
486 : {
487 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
488 : }
489 :
490 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
491 : {
492 12 : m_glibWrapper->gValueUnset(&item);
493 12 : m_gstWrapper->gstIteratorFree(it);
494 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
495 : }
496 : }
497 :
498 0 : m_glibWrapper->gValueUnset(&item);
499 0 : break;
500 : }
501 2 : case GST_ITERATOR_RESYNC:
502 2 : m_gstWrapper->gstIteratorResync(it);
503 2 : break;
504 7 : case GST_ITERATOR_ERROR:
505 : case GST_ITERATOR_DONE:
506 7 : done = TRUE;
507 7 : break;
508 : }
509 : }
510 :
511 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
512 :
513 7 : m_glibWrapper->gValueUnset(&item);
514 7 : m_gstWrapper->gstIteratorFree(it);
515 :
516 7 : return nullptr;
517 : }
518 :
519 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
520 : {
521 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
522 3 : GValue item = G_VALUE_INIT;
523 3 : gboolean done = FALSE;
524 :
525 4 : while (!done)
526 : {
527 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
528 : {
529 2 : case GST_ITERATOR_OK:
530 : {
531 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
532 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
533 :
534 2 : if (factory)
535 : {
536 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
537 2 : if (mediaSourceType == MediaSourceType::AUDIO)
538 : {
539 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
540 : }
541 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
542 : {
543 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
544 : }
545 :
546 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
547 : {
548 2 : m_glibWrapper->gValueUnset(&item);
549 2 : m_gstWrapper->gstIteratorFree(it);
550 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
551 : }
552 : }
553 :
554 0 : m_glibWrapper->gValueUnset(&item);
555 0 : break;
556 : }
557 0 : case GST_ITERATOR_RESYNC:
558 0 : m_gstWrapper->gstIteratorResync(it);
559 0 : break;
560 1 : case GST_ITERATOR_ERROR:
561 : case GST_ITERATOR_DONE:
562 1 : done = TRUE;
563 1 : break;
564 : }
565 : }
566 :
567 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
568 :
569 1 : m_glibWrapper->gValueUnset(&item);
570 1 : m_gstWrapper->gstIteratorFree(it);
571 :
572 1 : return nullptr;
573 : }
574 :
575 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
576 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
577 : {
578 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
579 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
580 5 : if (kSource)
581 : {
582 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
583 : audioAttributes =
584 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
585 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
586 : 0, // used only in one of logs in rdk_gstreamer_utils, no
587 : // need to set this param.
588 : 0, // used only in one of logs in rdk_gstreamer_utils, no
589 : // need to set this param.
590 4 : audioConfig.codecSpecificConfig.data(),
591 : static_cast<std::uint32_t>(
592 4 : audioConfig.codecSpecificConfig.size())};
593 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
594 : {
595 2 : audioAttributes->m_codecParam = "mp4a";
596 : }
597 2 : else if (source->getMimeType() == "audio/x-eac3")
598 : {
599 1 : audioAttributes->m_codecParam = "ec-3";
600 : }
601 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
602 : {
603 1 : audioAttributes->m_codecParam = "lpcm";
604 : }
605 4 : }
606 : else
607 : {
608 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
609 : }
610 :
611 5 : return audioAttributes;
612 : }
613 :
614 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
615 : {
616 1 : if (!m_workerThread)
617 0 : return false;
618 :
619 2 : m_workerThread->enqueueTask(
620 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
621 1 : return true;
622 : }
623 :
624 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
625 : {
626 5 : bool returnValue{false};
627 5 : GstElement *sink{getSink(mediaSourceType)};
628 5 : if (sink)
629 : {
630 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
631 : {
632 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
633 2 : returnValue = true;
634 : }
635 : else
636 : {
637 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
638 : }
639 3 : m_gstWrapper->gstObjectUnref(sink);
640 : }
641 : else
642 : {
643 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
644 : }
645 :
646 5 : return returnValue;
647 : }
648 :
649 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
650 : {
651 5 : bool returnValue{false};
652 5 : GstElement *sink{getSink(mediaSourceType)};
653 5 : if (sink)
654 : {
655 3 : GstStructure *stats{nullptr};
656 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
657 3 : if (!stats)
658 : {
659 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
660 : }
661 : else
662 : {
663 : guint64 renderedFramesTmp;
664 : guint64 droppedFramesTmp;
665 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
666 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
667 : {
668 1 : renderedFrames = renderedFramesTmp;
669 1 : droppedFrames = droppedFramesTmp;
670 1 : returnValue = true;
671 : }
672 : else
673 : {
674 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
675 : GST_ELEMENT_NAME(sink));
676 : }
677 2 : m_gstWrapper->gstStructureFree(stats);
678 : }
679 3 : m_gstWrapper->gstObjectUnref(sink);
680 : }
681 :
682 5 : return returnValue;
683 : }
684 :
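 : // Wraps the media segment's payload in a GstBuffer and, for encrypted segments, attaches GstRialtoProtectionData
 : // (key id, IV, subsamples, cipher mode and encryption pattern) as protection metadata.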
685 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
686 : {
687 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
688 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
689 :
690 4 : if (mediaSegment.isEncrypted())
691 : {
692 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
693 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
694 :
695 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
696 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
697 3 : mediaSegment.getInitVector().size());
698 3 : GstBuffer *subsamples{nullptr};
699 3 : if (!mediaSegment.getSubSamples().empty())
700 : {
701 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
702 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
703 : GstByteWriter writer;
704 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
705 :
706 6 : for (const auto &subSample : mediaSegment.getSubSamples())
707 : {
708 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
709 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
710 : }
711 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
712 : }
713 :
714 3 : uint32_t crypt = 0;
715 3 : uint32_t skip = 0;
716 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
717 :
718 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
719 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
720 3 : mediaSegment.getInitWithLast15(),
721 : keyId,
722 : initVector,
723 : subsamples,
724 6 : mediaSegment.getCipherMode(),
725 : crypt,
726 : skip,
727 : encryptionPatternSet,
728 6 : m_context.decryptionService};
729 :
730 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
731 : {
732 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
733 1 : if (keyId)
734 : {
735 1 : m_gstWrapper->gstBufferUnref(keyId);
736 : }
737 1 : if (initVector)
738 : {
739 1 : m_gstWrapper->gstBufferUnref(initVector);
740 : }
741 1 : if (subsamples)
742 : {
743 1 : m_gstWrapper->gstBufferUnref(subsamples);
744 : }
745 : }
746 : }
747 :
748 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
749 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
750 4 : return gstBuffer;
751 : }
752 :
753 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
754 : {
755 4 : auto elem = m_context.streamInfo.find(mediaSource);
756 4 : if (elem != m_context.streamInfo.end())
757 : {
758 2 : StreamInfo &streamInfo = elem->second;
759 2 : streamInfo.isNeedDataPending = false;
760 :
761 : // Send new NeedMediaData if we still need it
762 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
763 : {
764 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
765 : }
766 : }
767 : else
768 : {
769 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
770 : }
771 4 : }
772 :
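 : // Pushes all buffers queued for the given source type into its app source, notifies NetworkState::BUFFERED once every
 : // stream has pushed data, cancels a pending underflow and applies a pending EOS for that stream.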
773 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
774 : {
775 19 : auto elem = m_context.streamInfo.find(mediaType);
776 19 : if (elem != m_context.streamInfo.end())
777 : {
778 16 : StreamInfo &streamInfo = elem->second;
779 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
780 : {
781 2 : return;
782 : }
783 :
784 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
785 : {
786 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
787 : }
788 : else
789 : {
790 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
791 : }
792 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
793 : {
794 : // This needs to be done before gstAppSrcPushBuffer() is
795 : // called because it can free the memory
796 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
797 : }
798 :
799 28 : for (GstBuffer *buffer : streamInfo.buffers)
800 : {
801 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
802 : }
803 14 : streamInfo.buffers.clear();
804 14 : streamInfo.isDataPushed = true;
805 :
806 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
807 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
808 15 : [](const auto &entry) { return entry.second.isDataPushed; });
809 :
810 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
811 : {
812 1 : m_context.bufferedNotificationSent = true;
813 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
814 : }
815 14 : cancelUnderflow(mediaType);
816 :
817 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
818 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
819 : {
820 0 : setEos(mediaType);
821 : }
822 : }
823 : }
824 :
825 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
826 : {
827 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
828 7 : if (elem != m_context.streamInfo.end())
829 : {
830 6 : StreamInfo &streamInfo = elem->second;
831 :
832 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
833 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
834 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
835 :
836 6 : if (rate != kInvalidRate)
837 : {
838 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
839 : }
840 :
841 6 : if (channels != kInvalidChannels)
842 : {
843 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
844 : }
845 :
846 6 : setCodecData(newCaps, codecData);
847 :
848 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
849 : {
850 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
851 : }
852 :
853 6 : m_gstWrapper->gstCapsUnref(newCaps);
854 6 : m_gstWrapper->gstCapsUnref(currentCaps);
855 : }
856 7 : }
857 :
858 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
859 : const std::shared_ptr<CodecData> &codecData)
860 : {
861 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
862 8 : if (elem != m_context.streamInfo.end())
863 : {
864 7 : StreamInfo &streamInfo = elem->second;
865 :
866 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
867 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
868 :
869 7 : if (width > 0)
870 : {
871 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
872 : }
873 :
874 7 : if (height > 0)
875 : {
876 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
877 : }
878 :
879 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
880 : {
881 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
882 : frameRate.denominator, NULL);
883 : }
884 :
885 7 : setCodecData(newCaps, codecData);
886 :
887 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
888 : {
889 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
890 : }
891 :
892 7 : m_gstWrapper->gstCapsUnref(currentCaps);
893 7 : m_gstWrapper->gstCapsUnref(newCaps);
894 : }
895 8 : }
896 :
897 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
898 : {
899 5 : if (clippingStart || clippingEnd)
900 : {
901 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
902 : {
903 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
904 : clippingStart, clippingEnd);
905 : }
906 : else
907 : {
908 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
909 : buffer, clippingStart, clippingEnd);
910 : }
911 : }
912 5 : }
913 :
914 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
915 : {
916 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
917 : {
918 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
919 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
920 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
921 7 : m_gstWrapper->gstBufferUnref(buf);
922 7 : return true;
923 : }
924 6 : if (codecData && CodecDataType::STRING == codecData->type)
925 : {
926 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
927 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
928 2 : return true;
929 : }
930 4 : return false;
931 : }
932 :
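 : // If initial segment data is stored for this app source, pushes one GstSample per stored segment so that downstream
 : // elements pick up the new position, stop position and applied rate.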
933 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
934 : {
935 12 : auto initialPosition = m_context.initialPositions.find(source);
936 12 : if (m_context.initialPositions.end() == initialPosition)
937 : {
938 : // Sending initial sample not needed
939 7 : return;
940 : }
941 : // GstAppSrc does not replace the segment if it is the same as the previous one.
942 : // That causes problems with position reporting on amlogic devices, so we need to push
943 : // two segments with different reset-time values.
944 5 : pushAdditionalSegmentIfRequired(source);
945 :
946 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
947 : {
948 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
949 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
950 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
951 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
952 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
953 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
954 : {
955 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
956 1 : m_gstWrapper->gstSegmentFree(segment);
957 1 : m_context.initialPositions.erase(initialPosition);
958 1 : return;
959 : }
960 5 : segment->applied_rate = appliedRate;
961 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
962 : "], rate: %f, appliedRate %f, reset_time: %d\n",
963 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
964 : segment->rate, segment->applied_rate, resetTime);
965 :
966 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
967 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
968 : // uses gst_buffer_copy, which loses the RialtoProtectionMeta (and that causes problems with EME
969 : // for the first frame).
970 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
971 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
972 5 : m_gstWrapper->gstSampleUnref(sample);
973 5 : m_gstWrapper->gstCapsUnref(currentCaps);
974 :
975 5 : m_gstWrapper->gstSegmentFree(segment);
976 : }
977 4 : m_context.currentPosition[source] = initialPosition->second.back();
978 4 : m_context.initialPositions.erase(initialPosition);
979 4 : return;
980 : }
981 :
982 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
983 : {
984 5 : auto currentPosition = m_context.currentPosition.find(source);
985 5 : if (m_context.currentPosition.end() == currentPosition)
986 : {
987 4 : return;
988 : }
989 1 : auto initialPosition = m_context.initialPositions.find(source);
990 1 : if (m_context.initialPositions.end() == initialPosition)
991 : {
992 0 : return;
993 : }
994 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
995 1 : currentPosition->second == initialPosition->second.back())
996 : {
997 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
998 1 : SegmentData additionalSegment = initialPosition->second.back();
999 1 : additionalSegment.resetTime = false;
1000 1 : initialPosition->second.push_back(additionalSegment);
1001 : }
1002 : }
1003 :
1004 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1005 : {
1006 2 : auto initialPosition = m_context.initialPositions.find(source);
1007 2 : if (m_context.initialPositions.end() == initialPosition)
1008 : {
1009 : // Sending initial sample not needed
1010 1 : return;
1011 : }
1012 :
1013 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1014 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1015 :
1016 1 : m_context.initialPositions.erase(initialPosition);
1017 : }
1018 :
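 : // Handles switching of an already-attached audio source: rebuilds the audio attributes and, when the new caps differ
 : // from the current appsrc caps, performs an audio track codec/channel switch via rdk_gstreamer_utils.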
1019 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1020 : {
1021 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1022 : {
1023 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1024 1 : return false;
1025 : }
1026 6 : if (source->getMimeType().empty())
1027 : {
1028 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1029 1 : return false;
1030 : }
1031 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1032 5 : if (!audioAttributes)
1033 : {
1034 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1035 1 : return false;
1036 : }
1037 : std::int64_t currentDispPts64b; // In netflix code it's currentDisplayPosition + offset
1038 4 : m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &currentDispPts64b);
1039 4 : long long currentDispPts = currentDispPts64b; // NOLINT(runtime/int)
1040 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1041 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1042 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1043 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1044 : {
1045 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1046 3 : int sampleAttributes{
1047 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
1048 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1049 3 : unsigned int ui32Delay{0}; // output param
1050 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1051 : // currentDispPts in rdk_gstreamer_utils function stub
1052 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1053 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1054 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1055 3 : m_glibWrapper->gFree(oldCapsCStr);
1056 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1057 3 : bool svpEnabled{true}; // assume always true
1058 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1059 : bool result =
1060 3 : m_rdkGstreamerUtilsWrapper
1061 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1062 : &status, &ui32Delay, &audioChangeTargetPts, &currentDispPts,
1063 : &audioChangeStage,
1064 : &caps, // may fail for amlogic - that implementation changes
1065 : // this parameter, it's probably used by Netflix later
1066 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1067 :
1068 3 : if (!result || !retVal)
1069 : {
1070 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1071 : }
1072 : }
1073 : else
1074 : {
1075 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1076 : }
1077 :
1078 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1079 4 : if (caps)
1080 4 : m_gstWrapper->gstCapsUnref(caps);
1081 4 : if (oldCaps)
1082 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1083 :
1084 4 : return true;
1085 5 : }
1086 :
1087 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1088 : {
1089 88 : if (m_workerThread)
1090 : {
1091 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1092 : }
1093 : }
1094 :
1095 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1096 : {
1097 1 : if (m_workerThread)
1098 : {
1099 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1100 : }
1101 : }
1102 :
1103 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1104 : {
1105 3 : if (m_workerThread)
1106 : {
1107 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1108 6 : m_workerThread->enqueueTask(
1109 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1110 : }
1111 3 : }
1112 :
1113 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1114 : {
1115 2 : if (m_workerThread)
1116 : {
1117 2 : bool underflowEnabled = m_context.isPlaying;
1118 4 : m_workerThread->enqueueTask(
1119 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1120 : }
1121 2 : }
1122 :
1123 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1124 : {
1125 1 : allSourcesAttached();
1126 : }
1127 :
1128 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1129 : {
1130 14 : auto elem = m_context.streamInfo.find(mediaSource);
1131 14 : if (elem != m_context.streamInfo.end())
1132 : {
1133 14 : StreamInfo &streamInfo = elem->second;
1134 14 : if (!streamInfo.underflowOccured)
1135 : {
1136 11 : return;
1137 : }
1138 :
1139 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1140 3 : streamInfo.underflowOccured = false;
1141 : }
1142 : }
1143 :
1144 1 : void GstGenericPlayer::play()
1145 : {
1146 1 : if (m_workerThread)
1147 : {
1148 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1149 : }
1150 : }
1151 :
1152 1 : void GstGenericPlayer::pause()
1153 : {
1154 1 : if (m_workerThread)
1155 : {
1156 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1157 : }
1158 : }
1159 :
1160 1 : void GstGenericPlayer::stop()
1161 : {
1162 1 : if (m_workerThread)
1163 : {
1164 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1165 : }
1166 : }
1167 :
1168 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1169 : {
1170 4 : if (!m_context.pipeline)
1171 : {
1172 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1173 1 : if (m_gstPlayerClient)
1174 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1175 1 : return false;
1176 : }
1177 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1178 : {
1179 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1180 1 : if (m_gstPlayerClient)
1181 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1182 1 : return false;
1183 : }
1184 2 : return true;
1185 : }
1186 :
1187 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1188 : {
1189 1 : if (m_workerThread)
1190 : {
1191 2 : m_workerThread->enqueueTask(
1192 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1193 : }
1194 1 : }
1195 :
1196 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1197 : {
1198 1 : if (m_workerThread)
1199 : {
1200 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1201 : }
1202 : }
1203 :
1204 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1205 : {
1206 4 : bool result = false;
1207 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1208 4 : if (videoSink)
1209 : {
1210 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1211 : {
1212 : std::string rect =
1213 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1214 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1215 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1216 2 : m_context.pendingGeometry.clear();
1217 2 : result = true;
1218 : }
1219 : else
1220 : {
1221 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1222 : }
1223 3 : m_gstWrapper->gstObjectUnref(videoSink);
1224 : }
1225 :
1226 4 : return result;
1227 : }
1228 :
1229 3 : bool GstGenericPlayer::setImmediateOutput()
1230 : {
1231 3 : bool result{false};
1232 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1233 : {
1234 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1235 3 : if (sink)
1236 : {
1237 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1238 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1239 :
1240 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1241 : {
1242 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1243 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1244 1 : result = true;
1245 : }
1246 : else
1247 : {
1248 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1249 : }
1250 2 : m_context.pendingImmediateOutputForVideo.reset();
1251 2 : m_gstWrapper->gstObjectUnref(sink);
1252 : }
1253 : else
1254 : {
1255 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1256 : }
1257 : }
1258 3 : return result;
1259 : }
1260 :
1261 4 : bool GstGenericPlayer::setShowVideoWindow()
1262 : {
1263 4 : if (!m_context.pendingShowVideoWindow.has_value())
1264 : {
1265 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1266 1 : return false;
1267 : }
1268 :
1269 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1270 3 : if (!videoSink)
1271 : {
1272 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1273 1 : return false;
1274 : }
1275 2 : bool result{false};
1276 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1277 : {
1278 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1279 1 : result = true;
1280 : }
1281 : else
1282 : {
1283 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1284 : }
1285 2 : m_context.pendingShowVideoWindow.reset();
1286 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1287 2 : return result;
1288 : }
1289 :
1290 4 : bool GstGenericPlayer::setLowLatency()
1291 : {
1292 4 : bool result{false};
1293 4 : if (m_context.pendingLowLatency.has_value())
1294 : {
1295 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1296 4 : if (sink)
1297 : {
1298 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1299 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1300 :
1301 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1302 : {
1303 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1304 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1305 2 : result = true;
1306 : }
1307 : else
1308 : {
1309 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1310 : }
1311 3 : m_context.pendingLowLatency.reset();
1312 3 : m_gstWrapper->gstObjectUnref(sink);
1313 : }
1314 : else
1315 : {
1316 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1317 : }
1318 : }
1319 4 : return result;
1320 : }
1321 :
1322 3 : bool GstGenericPlayer::setSync()
1323 : {
1324 3 : bool result{false};
1325 3 : if (m_context.pendingSync.has_value())
1326 : {
1327 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1328 3 : if (sink)
1329 : {
1330 2 : bool sync{m_context.pendingSync.value()};
1331 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1332 :
1333 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1334 : {
1335 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1336 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1337 1 : result = true;
1338 : }
1339 : else
1340 : {
1341 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1342 : }
1343 2 : m_context.pendingSync.reset();
1344 2 : m_gstWrapper->gstObjectUnref(sink);
1345 : }
1346 : else
1347 : {
1348 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1349 : }
1350 : }
1351 3 : return result;
1352 : }
1353 :
1354 3 : bool GstGenericPlayer::setSyncOff()
1355 : {
1356 3 : bool result{false};
1357 3 : if (m_context.pendingSyncOff.has_value())
1358 : {
1359 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1360 3 : if (decoder)
1361 : {
1362 2 : bool syncOff{m_context.pendingSyncOff.value()};
1363 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1364 :
1365 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1366 : {
1367 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1368 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1369 1 : result = true;
1370 : }
1371 : else
1372 : {
1373 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1374 : }
1375 2 : m_context.pendingSyncOff.reset();
1376 2 : m_gstWrapper->gstObjectUnref(decoder);
1377 : }
1378 : else
1379 : {
1380 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1381 : }
1382 : }
1383 3 : return result;
1384 : }
1385 :
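 : // Applies a pending stream-sync-mode value: sets the 'stream-sync-mode' property on the audio decoder or the
 : // 'syncmode-streaming' property on the video parser, depending on the source type.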
1386 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1387 : {
1388 6 : bool result{false};
1389 6 : int32_t streamSyncMode{0};
1390 : {
1391 6 : std::unique_lock lock{m_context.propertyMutex};
1392 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1393 : {
1394 0 : return false;
1395 : }
1396 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1397 : }
1398 6 : if (MediaSourceType::AUDIO == type)
1399 : {
1400 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1401 3 : if (!decoder)
1402 : {
1403 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1404 1 : return false;
1405 : }
1406 :
1407 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1408 :
1409 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1410 : {
1411 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1412 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1413 1 : result = true;
1414 : }
1415 : else
1416 : {
1417 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1418 : }
1419 2 : m_gstWrapper->gstObjectUnref(decoder);
1420 2 : std::unique_lock lock{m_context.propertyMutex};
1421 2 : m_context.pendingStreamSyncMode.erase(type);
1422 : }
1423 3 : else if (MediaSourceType::VIDEO == type)
1424 : {
1425 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1426 3 : if (!parser)
1427 : {
1428 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1429 1 : return false;
1430 : }
1431 :
1432 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1433 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1434 :
1435 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1436 : {
1437 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1438 1 : result = true;
1439 : }
1440 : else
1441 : {
1442 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1443 : }
1444 2 : m_gstWrapper->gstObjectUnref(parser);
1445 2 : std::unique_lock lock{m_context.propertyMutex};
1446 2 : m_context.pendingStreamSyncMode.erase(type);
1447 : }
1448 4 : return result;
1449 : }
1450 :
1451 3 : bool GstGenericPlayer::setRenderFrame()
1452 : {
1453 3 : bool result{false};
1454 3 : if (m_context.pendingRenderFrame)
1455 : {
1456 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1457 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1458 3 : if (sink)
1459 : {
1460 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1461 : {
1462 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1463 :
1464 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1465 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1466 : false));
1467 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1468 1 : result = true;
1469 : }
1470 : else
1471 : {
1472 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1473 : }
1474 2 : m_gstWrapper->gstObjectUnref(sink);
1475 2 : m_context.pendingRenderFrame = false;
1476 : }
1477 : else
1478 : {
1479 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1480 : }
1481 : }
1482 3 : return result;
1483 : }
1484 :
1485 3 : bool GstGenericPlayer::setBufferingLimit()
1486 : {
1487 3 : bool result{false};
1488 3 : guint bufferingLimit{0};
1489 : {
1490 3 : std::unique_lock lock{m_context.propertyMutex};
1491 3 : if (!m_context.pendingBufferingLimit.has_value())
1492 : {
1493 0 : return false;
1494 : }
1495 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1496 : }
1497 :
1498 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1499 3 : if (decoder)
1500 : {
1501 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1502 :
1503 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1504 : {
1505 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1506 1 : result = true;
1507 : }
1508 : else
1509 : {
1510 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1511 : GST_ELEMENT_NAME(decoder));
1512 : }
1513 2 : m_gstWrapper->gstObjectUnref(decoder);
1514 2 : std::unique_lock lock{m_context.propertyMutex};
1515 2 : m_context.pendingBufferingLimit.reset();
1516 : }
1517 : else
1518 : {
1519 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1520 : }
1521 3 : return result;
1522 : }
1523 :
1524 2 : bool GstGenericPlayer::setUseBuffering()
1525 : {
1526 2 : std::unique_lock lock{m_context.propertyMutex};
1527 2 : if (m_context.pendingUseBuffering.has_value())
1528 : {
1529 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1530 : {
1531 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1532 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1533 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1534 : useBufferingGboolean, nullptr);
1535 1 : m_context.pendingUseBuffering.reset();
1536 1 : return true;
1537 : }
1538 : else
1539 : {
1540 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1541 : }
1542 : }
1543 1 : return false;
1544 2 : }
1545 :
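 : // Installs a westerossink as the pipeline's "video-sink" with "res-usage" set to 0, which on platforms
 : // using westerossink appears to mark this player as a secondary video output that must not claim the
 : // primary decoder resource. If westerossink is not available at all, there is nothing to configure and
 : // the call is treated as success.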
1546 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1547 : {
1548 8 : bool result = false;
1549 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1550 8 : if (factory)
1551 : {
1552 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1553 7 : if (videoSink)
1554 : {
1555 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1556 : {
1557 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1558 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1559 4 : result = true;
1560 : }
1561 : else
1562 : {
1563 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1564 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1565 : }
1566 : }
1567 : else
1568 : {
1569 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1570 : }
1571 :
1572 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1573 : }
1574 : else
1575 : {
1576 : // No westeros sink
1577 1 : result = true;
1578 : }
1579 :
1580 8 : return result;
1581 : }
1582 :
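 : // Publishes an "erm" GstContext (likely the Essos Resource Manager used on RDK platforms) with
 : // "res-usage" 0 on the pipeline, mirroring the secondary-video configuration above for platforms that
 : // arbitrate decoder resources through a context rather than a sink property.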
1583 8 : bool GstGenericPlayer::setErmContext()
1584 : {
1585 8 : bool result = false;
1586 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1587 8 : if (context)
1588 : {
1589 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1590 6 : if (contextStructure)
1591 : {
1592 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1593 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1594 5 : result = true;
1595 : }
1596 : else
1597 : {
1598 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1599 : }
1600 6 : m_gstWrapper->gstContextUnref(context);
1601 : }
1602 : else
1603 : {
1604 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1605 : }
1606 :
1607 8 : return result;
1608 : }
1609 :
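 : // Creates the periodic position-reporting timer (kPositionReportTimerMs) if it is not already running;
 : // each tick hops onto the worker thread to enqueue a ReportPosition task and a CheckAudioUnderflow task.
 : // The matching stop...Timer() method below cancels and releases the timer.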
1610 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1611 : {
1612 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1613 : {
1614 1 : return;
1615 : }
1616 :
1617 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1618 : kPositionReportTimerMs,
1619 10 : [this]()
1620 : {
1621 1 : if (m_workerThread)
1622 : {
1623 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context));
1624 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1625 : }
1626 1 : },
1627 5 : firebolt::rialto::common::TimerType::PERIODIC);
1628 : }
1629 :
1630 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1631 : {
1632 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1633 : {
1634 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1635 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1636 : }
1637 4 : }
1638 :
1639 2 : void GstGenericPlayer::stopWorkerThread()
1640 : {
1641 2 : if (m_workerThread)
1642 : {
1643 2 : m_workerThread->stop();
1644 : }
1645 : }
1646 :
1647 0 : void GstGenericPlayer::setPendingPlaybackRate()
1648 : {
1649 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1650 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1651 : }
1652 :
1653 1 : void GstGenericPlayer::renderFrame()
1654 : {
1655 1 : if (m_workerThread)
1656 : {
1657 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1658 : }
1659 : }
1660 :
1661 16 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1662 : {
1663 16 : if (m_workerThread)
1664 : {
1665 32 : m_workerThread->enqueueTask(
1666 32 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1667 : }
1668 16 : }
1669 :
1670 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1671 : {
1672 : // We are on the main thread here, but m_context.pipeline is safe to use because it is modified only in the
1673 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1674 3 : if (!m_context.pipeline)
1675 : {
1676 0 : return false;
1677 : }
1678 :
1679 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1680 : // Its behaviour was therefore determined by testing on a supported platform (Flex2), and the code is
1681 : // written to remain backward compatible on platforms that do not expose this property.
1682 : // The observed behaviour was:
1683 : // - if the returned fade volume is negative, audio fade is not active. In this case the usual technique
1684 : //   of querying the volume from the pipeline works and is used.
1685 : // - if the returned fade volume is positive, audio fade is active. In this case the fade volume itself is
1686 : //   the current volume level, from 0 (min) to 100 (max), and the pipeline's current volume level is
1687 : //   meaningless and does not contribute.
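 : // Example (a sketch of the mapping above, assuming an audio sink that exposes "fade-volume"):
 : //   fade-volume ==  75  -> audio fade active,   currentVolume = 0.75
 : //   fade-volume == -100 -> audio fade inactive, currentVolume is read from the pipeline's stream volume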
1688 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1689 5 : if (m_context.audioFadeEnabled && sink &&
1690 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1691 : {
1692 2 : gint fadeVolume{-100};
1693 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1694 2 : if (fadeVolume < 0)
1695 : {
1696 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1697 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1698 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1699 : }
1700 : else
1701 : {
1702 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1703 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1704 : }
1705 : }
1706 : else
1707 : {
1708 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1709 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1710 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1711 : }
1712 :
1713 3 : if (sink)
1714 2 : m_gstWrapper->gstObjectUnref(sink);
1715 :
1716 3 : return true;
1717 : }
1718 :
1719 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1720 : {
1721 1 : if (m_workerThread)
1722 : {
1723 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1724 : }
1725 : }
1726 :
1727 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1728 : {
1729 : // We are on the main thread here, but m_context.pipeline is safe to use because it is modified only in the
1730 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1731 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1732 : {
1733 2 : if (!m_context.subtitleSink)
1734 : {
1735 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1736 1 : return false;
1737 : }
1738 1 : gboolean muteValue{FALSE};
1739 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1740 1 : mute = muteValue;
1741 : }
1742 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1743 : {
1744 2 : if (!m_context.pipeline)
1745 : {
1746 1 : return false;
1747 : }
1748 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1749 : }
1750 : else
1751 : {
1752 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1753 1 : return false;
1754 : }
1755 :
1756 2 : return true;
1757 : }
1758 :
1759 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1760 : {
1761 1 : GstElement *sink = getSink(mediaSourceType);
1762 1 : if (!sink)
1763 : {
1764 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1765 0 : return true; // Our sinks are async by default
1766 : }
1767 1 : gboolean returnValue{TRUE};
1768 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1769 1 : m_gstWrapper->gstObjectUnref(sink);
1770 1 : return returnValue == TRUE;
1771 : }
1772 :
1773 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1774 : {
1775 1 : if (m_workerThread)
1776 : {
1777 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1778 : }
1779 : }
1780 :
1781 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1782 : {
1783 3 : if (!m_context.subtitleSink)
1784 : {
1785 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1786 1 : return false;
1787 : }
1788 :
1789 2 : gchar *identifier = nullptr;
1790 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1791 :
1792 2 : if (identifier)
1793 : {
1794 1 : textTrackIdentifier = identifier;
1795 1 : m_glibWrapper->gFree(identifier);
1796 1 : return true;
1797 : }
1798 : else
1799 : {
1800 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1801 1 : return false;
1802 : }
1803 : }
1804 :
1805 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1806 : {
1807 1 : if (m_workerThread)
1808 : {
1809 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1810 : }
1811 1 : return true;
1812 : }
1813 :
1814 1 : bool GstGenericPlayer::setSync(bool sync)
1815 : {
1816 1 : if (m_workerThread)
1817 : {
1818 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1819 : }
1820 1 : return true;
1821 : }
1822 :
1823 4 : bool GstGenericPlayer::getSync(bool &sync)
1824 : {
1825 4 : bool returnValue{false};
1826 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1827 4 : if (sink)
1828 : {
1829 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1830 : {
1831 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1832 1 : returnValue = true;
1833 : }
1834 : else
1835 : {
1836 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1837 : }
1838 2 : m_gstWrapper->gstObjectUnref(sink);
1839 : }
1840 2 : else if (m_context.pendingSync.has_value())
1841 : {
1842 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1843 1 : sync = m_context.pendingSync.value();
1844 1 : returnValue = true;
1845 : }
1846 : else
1847 : {
1848 : // We don't know the sink's default "sync" setting, so report failure here
1849 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1850 : }
1851 :
1852 4 : return returnValue;
1853 : }
1854 :
1855 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1856 : {
1857 1 : if (m_workerThread)
1858 : {
1859 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1860 : }
1861 1 : return true;
1862 : }
1863 :
1864 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1865 : {
1866 1 : if (m_workerThread)
1867 : {
1868 2 : m_workerThread->enqueueTask(
1869 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1870 : }
1871 1 : return true;
1872 : }
1873 :
1874 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1875 : {
1876 5 : bool returnValue{false};
1877 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1878 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1879 : {
1880 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1881 2 : returnValue = true;
1882 : }
1883 : else
1884 : {
1885 3 : std::unique_lock lock{m_context.propertyMutex};
1886 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1887 : {
1888 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1889 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1890 1 : returnValue = true;
1891 : }
1892 : else
1893 : {
1894 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1895 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1896 : }
1897 3 : }
1898 :
1899 5 : if (decoder)
1900 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1901 :
1902 5 : return returnValue;
1903 : }
1904 :
1905 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1906 : {
1907 1 : if (m_workerThread)
1908 : {
1909 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1910 : }
1911 : }
1912 :
1913 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
1914 : {
1915 1 : if (m_workerThread)
1916 : {
1917 1 : async = isAsync(mediaSourceType);
1918 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
1919 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
1920 : }
1921 : }
1922 :
1923 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
1924 : double appliedRate, uint64_t stopPosition)
1925 : {
1926 1 : if (m_workerThread)
1927 : {
1928 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
1929 : resetTime, appliedRate, stopPosition));
1930 : }
1931 : }
1932 :
1933 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
1934 : {
1935 1 : if (m_workerThread)
1936 : {
1937 2 : m_workerThread->enqueueTask(
1938 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
1939 : }
1940 1 : }
1941 :
1942 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
1943 : {
1944 1 : if (m_workerThread)
1945 : {
1946 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
1947 : }
1948 : }
1949 :
1950 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
1951 : {
1952 5 : bool returnValue{false};
1953 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1954 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1955 : {
1956 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
1957 2 : returnValue = true;
1958 : }
1959 : else
1960 : {
1961 3 : std::unique_lock lock{m_context.propertyMutex};
1962 3 : if (m_context.pendingBufferingLimit.has_value())
1963 : {
1964 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1965 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
1966 1 : returnValue = true;
1967 : }
1968 : else
1969 : {
1970 2 : RIALTO_SERVER_LOG_ERROR("Buffering limit not supported in decoder '%s'",
1971 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1972 : }
1973 3 : }
1974 :
1975 5 : if (decoder)
1976 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1977 :
1978 5 : return returnValue;
1979 : }
1980 :
1981 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
1982 : {
1983 1 : if (m_workerThread)
1984 : {
1985 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
1986 : }
1987 : }
1988 :
1989 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
1990 : {
1991 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1992 : {
1993 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
1994 1 : return true;
1995 : }
1996 : else
1997 : {
1998 2 : std::unique_lock lock{m_context.propertyMutex};
1999 2 : if (m_context.pendingUseBuffering.has_value())
2000 : {
2001 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2002 1 : useBuffering = m_context.pendingUseBuffering.value();
2003 1 : return true;
2004 : }
2005 2 : }
2006 1 : return false;
2007 : }
2008 :
2009 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2010 : {
2011 1 : if (m_workerThread)
2012 : {
2013 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2014 : }
2015 : }
2016 :
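 : // Reports whether video is expected to act as the playback master. The check is registry based: if the
 : // "amlhalasink" audio sink (found on Amlogic-based platforms) is present, audio is assumed to drive the
 : // clock and isVideoMaster is set to false; otherwise it defaults to true.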
2017 3 : bool GstGenericPlayer::isVideoMaster(bool &isVideoMaster)
2018 : {
2019 3 : GstRegistry *reg = m_gstWrapper->gstRegistryGet();
2020 3 : if (!reg)
2021 : {
2022 1 : RIALTO_SERVER_LOG_ERROR("Failed to get the gst registry");
2023 1 : return false;
2024 : }
2025 2 : GstPluginFeature *feature{nullptr};
2026 2 : isVideoMaster = true;
2027 2 : if (nullptr != (feature = m_gstWrapper->gstRegistryLookupFeature(reg, "amlhalasink")))
2028 : {
2029 1 : isVideoMaster = false;
2030 1 : m_gstWrapper->gstObjectUnref(feature);
2031 : }
2032 2 : return true;
2033 : }
2034 :
2035 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2036 : {
2037 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2038 : }
2039 :
2040 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2041 : {
2042 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2043 : }
2044 :
2045 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2046 : {
2047 : // Only add children that are sinks
2048 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2049 : {
2050 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2051 :
2052 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2053 : {
2054 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2055 : }
2056 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2057 : }
2058 3 : }
2059 :
2060 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2061 : {
2062 : // Only add children that are sinks
2063 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2064 : {
2065 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2066 :
2067 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2068 : {
2069 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2070 : }
2071 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2072 : }
2073 3 : }
2074 :
2075 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2076 : {
2077 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2078 : {
2079 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2080 :
2081 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2082 : {
2083 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2084 1 : return;
2085 : }
2086 :
2087 2 : m_context.autoVideoChildSink = nullptr;
2088 : }
2089 : }
2090 :
2091 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2092 : {
2093 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2094 : {
2095 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2096 :
2097 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2098 : {
2099 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2100 1 : return;
2101 : }
2102 :
2103 2 : m_context.autoAudioChildSink = nullptr;
2104 : }
2105 : }
2106 :
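 : // autovideosink/autoaudiosink are wrapper bins that create a concrete platform sink as a child at runtime,
 : // so properties must be read from and written to that child. These helpers return the tracked child sink
 : // when the given element is such an auto sink, and return the element itself otherwise.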
2107 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2108 : {
2109 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2110 14 : if (!kTmpName)
2111 0 : return sink;
2112 :
2113 28 : const std::string kElementTypeName{kTmpName};
2114 14 : if (kElementTypeName == "GstAutoVideoSink")
2115 : {
2116 1 : if (!m_context.autoVideoChildSink)
2117 : {
2118 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2119 : }
2120 : else
2121 : {
2122 1 : return m_context.autoVideoChildSink;
2123 : }
2124 : }
2125 13 : return sink;
2126 14 : }
2127 :
2128 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2129 : {
2130 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2131 11 : if (!kTmpName)
2132 0 : return sink;
2133 :
2134 22 : const std::string kElementTypeName{kTmpName};
2135 11 : if (kElementTypeName == "GstAutoAudioSink")
2136 : {
2137 1 : if (!m_context.autoAudioChildSink)
2138 : {
2139 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2140 : }
2141 : else
2142 : {
2143 1 : return m_context.autoAudioChildSink;
2144 : }
2145 : }
2146 10 : return sink;
2147 11 : }
2148 :
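 : // Builds the playbin "flags" bitmask: video, native-video and text are always requested; audio is added
 : // when enableAudio is set, and native-audio is requested only when a "brcmaudiosink" (Broadcom platforms)
 : // can be found in the registry, as probed by shouldEnableNativeAudio().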
2149 209 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2150 : {
2151 209 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2152 :
2153 209 : if (enableAudio)
2154 : {
2155 209 : flags |= getGstPlayFlag("audio");
2156 209 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2157 : }
2158 :
2159 209 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2160 : }
2161 :
2162 209 : bool GstGenericPlayer::shouldEnableNativeAudio()
2163 : {
2164 209 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2165 209 : if (factory)
2166 : {
2167 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2168 1 : return true;
2169 : }
2170 208 : return false;
2171 : }
2172 :
2173 : } // namespace firebolt::rialto::server
|