Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Position reporting interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
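// Illustrative sketch (based on startPositionReportingAndCheckAudioUnderflowTimer() below):
// kPositionReportTimerMs is passed to ITimerFactory::createTimer() together with
// TimerType::PERIODIC, so a ReportPosition task and a CheckAudioUnderflow task are enqueued on the
// worker thread every 250 ms while the timer is active, e.g.
//   m_timerFactory->createTimer(kPositionReportTimerMs, callback,
//                               firebolt::rialto::common::TimerType::PERIODIC);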
45 :
46 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
47 : {
48 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
49 2 : (lhs.stopPosition == rhs.stopPosition);
50 : }
51 : } // namespace
52 :
53 : namespace firebolt::rialto::server
54 : {
55 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
56 :
57 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
58 : {
59 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
60 :
61 3 : if (!factory)
62 : {
63 : try
64 : {
65 3 : factory = std::make_shared<GstGenericPlayerFactory>();
66 : }
67 0 : catch (const std::exception &e)
68 : {
69 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
70 : }
71 :
72 3 : GstGenericPlayerFactory::m_factory = factory;
73 : }
74 :
75 3 : return factory;
76 : }
77 :
78 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
79 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
80 : const VideoRequirements &videoRequirements,
81 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
82 : {
83 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
84 :
85 : try
86 : {
87 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
88 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
89 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
92 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
93 : {
94 0 : throw std::runtime_error("Cannot create GstWrapper");
95 : }
96 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
97 : {
98 0 : throw std::runtime_error("Cannot create GlibWrapper");
99 : }
100 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
101 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
102 : {
103 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
104 : }
105 : gstPlayer = std::make_unique<
106 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
107 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
108 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
109 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
110 : rdkGstreamerUtilsWrapper,
111 2 : IGstTextTrackSinkFactory::createFactory()),
112 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
113 3 : IGstProtectionMetadataHelperFactory::createFactory());
114 1 : }
115 0 : catch (const std::exception &e)
116 : {
117 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
118 : }
119 :
120 1 : return gstPlayer;
121 : }
122 :
123 209 : GstGenericPlayer::GstGenericPlayer(
124 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
125 : const VideoRequirements &videoRequirements,
126 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
129 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
130 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
131 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
132 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
133 209 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
134 209 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
135 418 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
136 627 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
137 : {
138 209 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
139 :
140 209 : gstInitialiser.waitForInitialisation();
141 :
142 209 : m_context.decryptionService = &decryptionService;
143 :
144 209 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
145 : {
146 2 : throw std::runtime_error("Cannot create GstSrc");
147 : }
148 :
149 207 : if (!timerFactory)
150 : {
151 1 : throw std::runtime_error("TimeFactory is invalid");
152 : }
153 :
154 412 : if ((!gstProtectionMetadataFactory) ||
155 412 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
156 : {
157 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
158 : }
159 :
160 : // Ensure that rialtosrc has been initialised
161 206 : m_context.gstSrc->initSrc();
162 :
163 : // Start task thread
164 206 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
165 : {
166 0 : throw std::runtime_error("Failed to create the worker thread");
167 : }
168 :
169 : // Initialise pipeline
170 206 : switch (type)
171 : {
172 205 : case MediaType::MSE:
173 : {
174 205 : initMsePipeline();
175 205 : break;
176 : }
177 1 : default:
178 : {
179 1 : resetWorkerThread();
180 1 : throw std::runtime_error("Media type not supported");
181 : }
182 : }
183 :
184 : // Check the video requirements for a limited video.
185 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
186 : // video in a dual video scenario.
187 205 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
188 : {
189 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
190 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
191 8 : bool ermContextResult = setErmContext();
192 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
193 : {
194 1 : resetWorkerThread();
195 1 : termPipeline();
196 1 : throw std::runtime_error("Could not set secondary video");
197 : }
198 7 : }
199 : else
200 : {
201 197 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
202 : }
203 :
204 : m_gstDispatcherThread =
205 204 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
206 279 : }
207 :
208 408 : GstGenericPlayer::~GstGenericPlayer()
209 : {
210 204 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
211 :
212 204 : m_gstDispatcherThread.reset();
213 :
214 204 : resetWorkerThread();
215 :
216 204 : termPipeline();
217 408 : }
218 :
219 205 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 205 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 205 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 205 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 205 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 205 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 205 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 205 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 205 : if (playsink)
238 : {
239 204 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 204 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 205 : }
247 :
248 206 : void GstGenericPlayer::resetWorkerThread()
249 : {
250 206 : m_workerThread.reset();
251 : }
252 :
253 205 : void GstGenericPlayer::termPipeline()
254 : {
255 205 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
256 : {
257 0 : m_finishSourceSetupTimer->cancel();
258 : }
259 :
260 205 : m_finishSourceSetupTimer.reset();
261 :
262 254 : for (auto &elem : m_context.streamInfo)
263 : {
264 49 : StreamInfo &streamInfo = elem.second;
265 51 : for (auto &buffer : streamInfo.buffers)
266 : {
267 2 : m_gstWrapper->gstBufferUnref(buffer);
268 : }
269 :
270 49 : streamInfo.buffers.clear();
271 : }
272 :
273 205 : m_taskFactory->createStop(m_context, *this)->execute();
274 205 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
275 205 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
276 205 : m_gstWrapper->gstObjectUnref(bus);
277 :
278 205 : if (m_context.source)
279 : {
280 1 : m_gstWrapper->gstObjectUnref(m_context.source);
281 : }
282 205 : if (m_context.subtitleSink)
283 : {
284 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
285 : }
286 :
287 : // Delete the pipeline
288 205 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
289 : }
290 :
291 821 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
292 : {
293 : GFlagsClass *flagsClass =
294 821 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
295 821 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
296 821 : return flag ? flag->value : 0;
297 : }
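// Illustrative usage (a sketch; the exact flag set is an assumption, not taken from this file):
// values returned by getGstPlayFlag() are typically OR'd together and written to playbin's "flags"
// property, e.g. inside setPlaybinFlags():
//   unsigned flags = getGstPlayFlag("audio") | getGstPlayFlag("video") | getGstPlayFlag("native-video");
//   m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);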
298 :
299 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
300 : {
301 1 : self->m_gstWrapper->gstObjectRef(source);
302 1 : if (self->m_workerThread)
303 : {
304 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
305 : }
306 : }
307 :
308 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
309 : {
310 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
311 1 : self->m_gstWrapper->gstObjectRef(element);
312 1 : if (self->m_workerThread)
313 : {
314 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
315 : }
316 : }
317 :
318 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
319 : {
320 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
321 1 : if (self->m_workerThread)
322 : {
323 2 : self->m_workerThread->enqueueTask(
324 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
325 : }
326 1 : }
327 :
328 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
329 : {
330 1 : if (m_workerThread)
331 : {
332 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
333 : }
334 : }
335 :
336 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
337 : {
338 1 : if (m_workerThread)
339 : {
340 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
341 : }
342 : }
343 :
344 2 : void GstGenericPlayer::allSourcesAttached()
345 : {
346 2 : if (m_workerThread)
347 : {
348 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
349 : }
350 : }
351 :
352 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
353 : {
354 1 : if (m_workerThread)
355 : {
356 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
357 : }
358 : }
359 :
360 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
361 : {
362 1 : if (m_workerThread)
363 : {
364 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
365 : }
366 : }
367 :
368 1 : void GstGenericPlayer::setPosition(std::int64_t position)
369 : {
370 1 : if (m_workerThread)
371 : {
372 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
373 : }
374 : }
375 :
376 1 : void GstGenericPlayer::setPlaybackRate(double rate)
377 : {
378 1 : if (m_workerThread)
379 : {
380 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
381 : }
382 : }
383 :
384 4 : bool GstGenericPlayer::getPosition(std::int64_t &position)
385 : {
386 : // We are on the main thread here, but m_context.pipeline can be used because it is modified only in the GstGenericPlayer
387 : // constructor and destructor. GstGenericPlayer is created/destroyed on the main thread, so there is no risk of a crash here.
388 4 : if (!m_context.pipeline || GST_STATE(m_context.pipeline) < GST_STATE_PAUSED)
389 : {
390 1 : RIALTO_SERVER_LOG_WARN("GetPosition failed. Pipeline is null or state < PAUSED");
391 1 : return false;
392 : }
393 3 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
394 : {
395 1 : return false;
396 : }
397 2 : return true;
398 : }
399 :
400 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
401 : {
402 38 : const char *kSinkName{nullptr};
403 38 : GstElement *sink{nullptr};
404 38 : switch (mediaSourceType)
405 : {
406 18 : case MediaSourceType::AUDIO:
407 18 : kSinkName = "audio-sink";
408 18 : break;
409 18 : case MediaSourceType::VIDEO:
410 18 : kSinkName = "video-sink";
411 18 : break;
412 2 : default:
413 2 : break;
414 : }
415 38 : if (!kSinkName)
416 : {
417 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
418 : }
419 : else
420 : {
421 36 : if (m_context.pipeline == nullptr)
422 : {
423 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
424 : }
425 : else
426 : {
427 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
428 : }
429 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
430 36 : if (sink)
431 : {
432 25 : GstElement *autoSink{sink};
433 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
434 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
435 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
436 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
437 :
438 : // Is this an auto-sink?...
439 25 : if (autoSink != sink)
440 : {
441 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
442 :
443 : // increase the reference count of the auto sink
444 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
445 : }
446 : }
447 : else
448 : {
449 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
450 : }
451 : }
452 38 : return sink;
453 : }
454 :
455 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
456 : {
457 1 : m_flushWatcher->setFlushed(mediaSourceType);
458 : }
459 :
460 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
461 : {
462 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
463 19 : GValue item = G_VALUE_INIT;
464 19 : gboolean done = FALSE;
465 :
466 28 : while (!done)
467 : {
468 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
469 : {
470 12 : case GST_ITERATOR_OK:
471 : {
472 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
473 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
474 :
475 12 : if (factory)
476 : {
477 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
478 12 : if (mediaSourceType == MediaSourceType::AUDIO)
479 : {
480 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
481 : }
482 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
483 : {
484 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
485 : }
486 :
487 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
488 : {
489 12 : m_glibWrapper->gValueUnset(&item);
490 12 : m_gstWrapper->gstIteratorFree(it);
491 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
492 : }
493 : }
494 :
495 0 : m_glibWrapper->gValueUnset(&item);
496 0 : break;
497 : }
498 2 : case GST_ITERATOR_RESYNC:
499 2 : m_gstWrapper->gstIteratorResync(it);
500 2 : break;
501 7 : case GST_ITERATOR_ERROR:
502 : case GST_ITERATOR_DONE:
503 7 : done = TRUE;
504 7 : break;
505 : }
506 : }
507 :
508 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
509 :
510 7 : m_glibWrapper->gValueUnset(&item);
511 7 : m_gstWrapper->gstIteratorFree(it);
512 :
513 7 : return nullptr;
514 : }
515 :
516 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
517 : {
518 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
519 3 : GValue item = G_VALUE_INIT;
520 3 : gboolean done = FALSE;
521 :
522 4 : while (!done)
523 : {
524 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
525 : {
526 2 : case GST_ITERATOR_OK:
527 : {
528 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
529 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
530 :
531 2 : if (factory)
532 : {
533 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
534 2 : if (mediaSourceType == MediaSourceType::AUDIO)
535 : {
536 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
537 : }
538 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
539 : {
540 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
541 : }
542 :
543 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
544 : {
545 2 : m_glibWrapper->gValueUnset(&item);
546 2 : m_gstWrapper->gstIteratorFree(it);
547 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
548 : }
549 : }
550 :
551 0 : m_glibWrapper->gValueUnset(&item);
552 0 : break;
553 : }
554 0 : case GST_ITERATOR_RESYNC:
555 0 : m_gstWrapper->gstIteratorResync(it);
556 0 : break;
557 1 : case GST_ITERATOR_ERROR:
558 : case GST_ITERATOR_DONE:
559 1 : done = TRUE;
560 1 : break;
561 : }
562 : }
563 :
564 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
565 :
566 1 : m_glibWrapper->gValueUnset(&item);
567 1 : m_gstWrapper->gstIteratorFree(it);
568 :
569 1 : return nullptr;
570 : }
571 :
572 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
573 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
574 : {
575 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
576 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
577 5 : if (kSource)
578 : {
579 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
580 : audioAttributes =
581 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
582 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
583 : 0, // used only in one of the logs in rdk_gstreamer_utils, no
584 : // need to set this param.
585 : 0, // used only in one of the logs in rdk_gstreamer_utils, no
586 : // need to set this param.
587 4 : audioConfig.codecSpecificConfig.data(),
588 : static_cast<std::uint32_t>(
589 4 : audioConfig.codecSpecificConfig.size())};
590 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
591 : {
592 2 : audioAttributes->m_codecParam = "mp4a";
593 : }
594 2 : else if (source->getMimeType() == "audio/x-eac3")
595 : {
596 1 : audioAttributes->m_codecParam = "ec-3";
597 : }
598 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
599 : {
600 1 : audioAttributes->m_codecParam = "lpcm";
601 : }
602 4 : }
603 : else
604 : {
605 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
606 : }
607 :
608 5 : return audioAttributes;
609 : }
610 :
611 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
612 : {
613 1 : if (!m_workerThread)
614 0 : return false;
615 :
616 2 : m_workerThread->enqueueTask(
617 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
618 1 : return true;
619 : }
620 :
621 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
622 : {
623 5 : bool returnValue{false};
624 5 : GstElement *sink{getSink(mediaSourceType)};
625 5 : if (sink)
626 : {
627 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
628 : {
629 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
630 2 : returnValue = true;
631 : }
632 : else
633 : {
634 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
635 : }
636 3 : m_gstWrapper->gstObjectUnref(sink);
637 : }
638 : else
639 : {
640 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
641 : }
642 :
643 5 : return returnValue;
644 : }
645 :
646 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
647 : {
648 5 : bool returnValue{false};
649 5 : GstElement *sink{getSink(mediaSourceType)};
650 5 : if (sink)
651 : {
652 3 : GstStructure *stats{nullptr};
653 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
654 3 : if (!stats)
655 : {
656 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
657 : }
658 : else
659 : {
660 : guint64 renderedFramesTmp;
661 : guint64 droppedFramesTmp;
662 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
663 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
664 : {
665 1 : renderedFrames = renderedFramesTmp;
666 1 : droppedFrames = droppedFramesTmp;
667 1 : returnValue = true;
668 : }
669 : else
670 : {
671 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
672 : GST_ELEMENT_NAME(sink));
673 : }
674 2 : m_gstWrapper->gstStructureFree(stats);
675 : }
676 3 : m_gstWrapper->gstObjectUnref(sink);
677 : }
678 :
679 5 : return returnValue;
680 : }
681 :
682 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
683 : {
684 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
685 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
686 :
687 4 : if (mediaSegment.isEncrypted())
688 : {
689 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
690 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
691 :
692 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
693 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
694 3 : mediaSegment.getInitVector().size());
695 3 : GstBuffer *subsamples{nullptr};
696 3 : if (!mediaSegment.getSubSamples().empty())
697 : {
698 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
699 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
700 : GstByteWriter writer;
701 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
702 :
703 6 : for (const auto &subSample : mediaSegment.getSubSamples())
704 : {
705 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
706 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
707 : }
708 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
709 : }
710 :
711 3 : uint32_t crypt = 0;
712 3 : uint32_t skip = 0;
713 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
714 :
715 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
716 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
717 3 : mediaSegment.getInitWithLast15(),
718 : keyId,
719 : initVector,
720 : subsamples,
721 6 : mediaSegment.getCipherMode(),
722 : crypt,
723 : skip,
724 : encryptionPatternSet,
725 6 : m_context.decryptionService};
726 :
727 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
728 : {
729 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
730 1 : if (keyId)
731 : {
732 1 : m_gstWrapper->gstBufferUnref(keyId);
733 : }
734 1 : if (initVector)
735 : {
736 1 : m_gstWrapper->gstBufferUnref(initVector);
737 : }
738 1 : if (subsamples)
739 : {
740 1 : m_gstWrapper->gstBufferUnref(subsamples);
741 : }
742 : }
743 : }
744 :
745 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
746 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
747 4 : return gstBuffer;
748 : }
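// Summary of the flow above: the segment payload is copied into a newly allocated GstBuffer; for
// encrypted segments the key id, initialisation vector and subsample map are wrapped into a
// GstRialtoProtectionData and attached via m_protectionMetadataWrapper (with the temporary buffers
// unreffed if attaching fails); finally the buffer's PTS and duration are taken from the segment.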
749 :
750 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
751 : {
752 4 : auto elem = m_context.streamInfo.find(mediaSource);
753 4 : if (elem != m_context.streamInfo.end())
754 : {
755 2 : StreamInfo &streamInfo = elem->second;
756 2 : streamInfo.isNeedDataPending = false;
757 :
758 : // Send new NeedMediaData if we still need it
759 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
760 : {
761 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
762 : }
763 : }
764 : else
765 : {
766 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
767 : }
768 4 : }
769 :
770 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
771 : {
772 19 : auto elem = m_context.streamInfo.find(mediaType);
773 19 : if (elem != m_context.streamInfo.end())
774 : {
775 16 : StreamInfo &streamInfo = elem->second;
776 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
777 : {
778 2 : return;
779 : }
780 :
781 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
782 : {
783 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
784 : }
785 : else
786 : {
787 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
788 : }
789 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
790 : {
791 : // This needs to be done before gstAppSrcPushBuffer() is
792 : // called because it can free the memory
793 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
794 : }
795 :
796 28 : for (GstBuffer *buffer : streamInfo.buffers)
797 : {
798 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
799 : }
800 14 : streamInfo.buffers.clear();
801 14 : streamInfo.isDataPushed = true;
802 :
803 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
804 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
805 15 : [](const auto &entry) { return entry.second.isDataPushed; });
806 :
807 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
808 : {
809 1 : m_context.bufferedNotificationSent = true;
810 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
811 : }
812 14 : cancelUnderflow(mediaType);
813 :
814 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
815 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
816 : {
817 0 : setEos(mediaType);
818 : }
819 : }
820 : }
821 :
822 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
823 : {
824 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
825 7 : if (elem != m_context.streamInfo.end())
826 : {
827 6 : StreamInfo &streamInfo = elem->second;
828 :
829 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
830 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
831 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
832 :
833 6 : if (rate != kInvalidRate)
834 : {
835 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
836 : }
837 :
838 6 : if (channels != kInvalidChannels)
839 : {
840 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
841 : }
842 :
843 6 : setCodecData(newCaps, codecData);
844 :
845 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
846 : {
847 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
848 : }
849 :
850 6 : m_gstWrapper->gstCapsUnref(newCaps);
851 6 : m_gstWrapper->gstCapsUnref(currentCaps);
852 : }
853 7 : }
854 :
855 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
856 : const std::shared_ptr<CodecData> &codecData)
857 : {
858 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
859 8 : if (elem != m_context.streamInfo.end())
860 : {
861 7 : StreamInfo &streamInfo = elem->second;
862 :
863 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
864 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
865 :
866 7 : if (width > 0)
867 : {
868 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
869 : }
870 :
871 7 : if (height > 0)
872 : {
873 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
874 : }
875 :
876 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
877 : {
878 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
879 : frameRate.denominator, NULL);
880 : }
881 :
882 7 : setCodecData(newCaps, codecData);
883 :
884 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
885 : {
886 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
887 : }
888 :
889 7 : m_gstWrapper->gstCapsUnref(currentCaps);
890 7 : m_gstWrapper->gstCapsUnref(newCaps);
891 : }
892 8 : }
893 :
894 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
895 : {
896 5 : if (clippingStart || clippingEnd)
897 : {
898 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
899 : {
900 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
901 : clippingStart, clippingEnd);
902 : }
903 : else
904 : {
905 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
906 : buffer, clippingStart, clippingEnd);
907 : }
908 : }
909 5 : }
910 :
911 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
912 : {
913 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
914 : {
915 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
916 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
917 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
918 7 : m_gstWrapper->gstBufferUnref(buf);
919 7 : return true;
920 : }
921 6 : if (codecData && CodecDataType::STRING == codecData->type)
922 : {
923 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
924 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
925 2 : return true;
926 : }
927 4 : return false;
928 : }
929 :
930 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
931 : {
932 12 : auto initialPosition = m_context.initialPositions.find(source);
933 12 : if (m_context.initialPositions.end() == initialPosition)
934 : {
935 : // Sending initial sample not needed
936 7 : return;
937 : }
938 : // GstAppSrc does not replace the segment if it is the same as the previous one.
939 : // That causes problems with position reporting on amlogic devices, so we need to push
940 : // two segments with different reset-time values.
941 5 : pushAdditionalSegmentIfRequired(source);
942 :
943 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
944 : {
945 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
946 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
947 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
948 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
949 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
950 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
951 : {
952 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
953 1 : m_gstWrapper->gstSegmentFree(segment);
954 1 : m_context.initialPositions.erase(initialPosition);
955 1 : return;
956 : }
957 5 : segment->applied_rate = appliedRate;
958 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
959 : "], rate: %f, appliedRate %f, reset_time: %d\n",
960 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
961 : segment->rate, segment->applied_rate, resetTime);
962 :
963 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
964 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
965 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (and that causes problems with EME
966 : // for the first frame).
967 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
968 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
969 5 : m_gstWrapper->gstSampleUnref(sample);
970 5 : m_gstWrapper->gstCapsUnref(currentCaps);
971 :
972 5 : m_gstWrapper->gstSegmentFree(segment);
973 : }
974 4 : m_context.currentPosition[source] = initialPosition->second.back();
975 4 : m_context.initialPositions.erase(initialPosition);
976 4 : return;
977 : }
978 :
979 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
980 : {
981 5 : auto currentPosition = m_context.currentPosition.find(source);
982 5 : if (m_context.currentPosition.end() == currentPosition)
983 : {
984 4 : return;
985 : }
986 1 : auto initialPosition = m_context.initialPositions.find(source);
987 1 : if (m_context.initialPositions.end() == initialPosition)
988 : {
989 0 : return;
990 : }
991 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
992 1 : currentPosition->second == initialPosition->second.back())
993 : {
994 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
995 1 : SegmentData additionalSegment = initialPosition->second.back();
996 1 : additionalSegment.resetTime = false;
997 1 : initialPosition->second.push_back(additionalSegment);
998 : }
999 : }
1000 :
1001 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1002 : {
1003 2 : auto initialPosition = m_context.initialPositions.find(source);
1004 2 : if (m_context.initialPositions.end() == initialPosition)
1005 : {
1006 : // Sending initial sample not needed
1007 1 : return;
1008 : }
1009 :
1010 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1011 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1012 :
1013 1 : m_context.initialPositions.erase(initialPosition);
1014 : }
1015 :
1016 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1017 : {
1018 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1019 : {
1020 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1021 1 : return false;
1022 : }
1023 6 : if (source->getMimeType().empty())
1024 : {
1025 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1026 1 : return false;
1027 : }
1028 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1029 5 : if (!audioAttributes)
1030 : {
1031 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1032 1 : return false;
1033 : }
1034 : std::int64_t currentDispPts64b; // In netflix code it's currentDisplayPosition + offset
1035 4 : m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &currentDispPts64b);
1036 4 : long long currentDispPts = currentDispPts64b; // NOLINT(runtime/int)
1037 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1038 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1039 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1040 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1041 : {
1042 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1043 3 : int sampleAttributes{
1044 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks that this param != NULL.
1045 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1046 3 : unsigned int ui32Delay{0}; // output param
1047 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1048 : // currentDispPts in rdk_gstreamer_utils function stub
1049 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1050 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1051 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1052 3 : m_glibWrapper->gFree(oldCapsCStr);
1053 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1054 3 : bool svpEnabled{true}; // assume always true
1055 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1056 : bool result =
1057 3 : m_rdkGstreamerUtilsWrapper
1058 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1059 : &status, &ui32Delay, &audioChangeTargetPts, &currentDispPts,
1060 : &audioChangeStage,
1061 : &caps, // may fail for amlogic - that implementation changes
1062 : // this parameter, it's probably used by Netflix later
1063 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1064 :
1065 3 : if (!result || !retVal)
1066 : {
1067 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1068 : }
1069 : }
1070 : else
1071 : {
1072 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1073 : }
1074 :
1075 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1076 4 : if (caps)
1077 4 : m_gstWrapper->gstCapsUnref(caps);
1078 4 : if (oldCaps)
1079 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1080 :
1081 4 : return true;
1082 5 : }
1083 :
1084 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1085 : {
1086 88 : if (m_workerThread)
1087 : {
1088 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1089 : }
1090 : }
1091 :
1092 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1093 : {
1094 1 : if (m_workerThread)
1095 : {
1096 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1097 : }
1098 : }
1099 :
1100 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1101 : {
1102 3 : if (m_workerThread)
1103 : {
1104 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1105 6 : m_workerThread->enqueueTask(
1106 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1107 : }
1108 3 : }
1109 :
1110 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1111 : {
1112 2 : if (m_workerThread)
1113 : {
1114 2 : bool underflowEnabled = m_context.isPlaying;
1115 4 : m_workerThread->enqueueTask(
1116 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1117 : }
1118 2 : }
1119 :
1120 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1121 : {
1122 1 : allSourcesAttached();
1123 : }
1124 :
1125 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1126 : {
1127 14 : auto elem = m_context.streamInfo.find(mediaSource);
1128 14 : if (elem != m_context.streamInfo.end())
1129 : {
1130 14 : StreamInfo &streamInfo = elem->second;
1131 14 : if (!streamInfo.underflowOccured)
1132 : {
1133 11 : return;
1134 : }
1135 :
1136 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1137 3 : streamInfo.underflowOccured = false;
1138 : }
1139 : }
1140 :
1141 1 : void GstGenericPlayer::play()
1142 : {
1143 1 : if (m_workerThread)
1144 : {
1145 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1146 : }
1147 : }
1148 :
1149 1 : void GstGenericPlayer::pause()
1150 : {
1151 1 : if (m_workerThread)
1152 : {
1153 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1154 : }
1155 : }
1156 :
1157 1 : void GstGenericPlayer::stop()
1158 : {
1159 1 : if (m_workerThread)
1160 : {
1161 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1162 : }
1163 : }
1164 :
1165 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1166 : {
1167 4 : if (!m_context.pipeline)
1168 : {
1169 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1170 1 : if (m_gstPlayerClient)
1171 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1172 1 : return false;
1173 : }
1174 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1175 : {
1176 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1177 1 : if (m_gstPlayerClient)
1178 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1179 1 : return false;
1180 : }
1181 2 : return true;
1182 : }
1183 :
1184 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1185 : {
1186 1 : if (m_workerThread)
1187 : {
1188 2 : m_workerThread->enqueueTask(
1189 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1190 : }
1191 1 : }
1192 :
1193 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1194 : {
1195 1 : if (m_workerThread)
1196 : {
1197 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1198 : }
1199 : }
1200 :
1201 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1202 : {
1203 4 : bool result = false;
1204 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1205 4 : if (videoSink)
1206 : {
1207 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1208 : {
1209 : std::string rect =
1210 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1211 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1212 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1213 2 : m_context.pendingGeometry.clear();
1214 2 : result = true;
1215 : }
1216 : else
1217 : {
1218 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1219 : }
1220 3 : m_gstWrapper->gstObjectUnref(videoSink);
1221 : }
1222 :
1223 4 : return result;
1224 : }
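// Example (derived from the code above): a pending geometry of x=0, y=0, width=1920, height=1080 is
// serialised as the string "0,0,1920,1080" before being written to the video sink's "rectangle"
// property, so setVideoGeometry() callers only ever deal with pixel coordinates.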
1225 :
1226 3 : bool GstGenericPlayer::setImmediateOutput()
1227 : {
1228 3 : bool result{false};
1229 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1230 : {
1231 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1232 3 : if (sink)
1233 : {
1234 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1235 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1236 :
1237 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1238 : {
1239 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1240 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1241 1 : result = true;
1242 : }
1243 : else
1244 : {
1245 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1246 : }
1247 2 : m_context.pendingImmediateOutputForVideo.reset();
1248 2 : m_gstWrapper->gstObjectUnref(sink);
1249 : }
1250 : else
1251 : {
1252 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1253 : }
1254 : }
1255 3 : return result;
1256 : }
1257 :
1258 4 : bool GstGenericPlayer::setShowVideoWindow()
1259 : {
1260 4 : if (!m_context.pendingShowVideoWindow.has_value())
1261 : {
1262 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1263 1 : return false;
1264 : }
1265 :
1266 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1267 3 : if (!videoSink)
1268 : {
1269 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1270 1 : return false;
1271 : }
1272 2 : bool result{false};
1273 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1274 : {
1275 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1276 1 : result = true;
1277 : }
1278 : else
1279 : {
1280 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1281 : }
1282 2 : m_context.pendingShowVideoWindow.reset();
1283 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1284 2 : return result;
1285 : }
1286 :
1287 4 : bool GstGenericPlayer::setLowLatency()
1288 : {
1289 4 : bool result{false};
1290 4 : if (m_context.pendingLowLatency.has_value())
1291 : {
1292 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1293 4 : if (sink)
1294 : {
1295 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1296 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1297 :
1298 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1299 : {
1300 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1301 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1302 2 : result = true;
1303 : }
1304 : else
1305 : {
1306 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1307 : }
1308 3 : m_context.pendingLowLatency.reset();
1309 3 : m_gstWrapper->gstObjectUnref(sink);
1310 : }
1311 : else
1312 : {
1313 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1314 : }
1315 : }
1316 4 : return result;
1317 : }
1318 :
1319 3 : bool GstGenericPlayer::setSync()
1320 : {
1321 3 : bool result{false};
1322 3 : if (m_context.pendingSync.has_value())
1323 : {
1324 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1325 3 : if (sink)
1326 : {
1327 2 : bool sync{m_context.pendingSync.value()};
1328 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1329 :
1330 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1331 : {
1332 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1333 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1334 1 : result = true;
1335 : }
1336 : else
1337 : {
1338 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1339 : }
1340 2 : m_context.pendingSync.reset();
1341 2 : m_gstWrapper->gstObjectUnref(sink);
1342 : }
1343 : else
1344 : {
1345 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1346 : }
1347 : }
1348 3 : return result;
1349 : }
1350 :
1351 3 : bool GstGenericPlayer::setSyncOff()
1352 : {
1353 3 : bool result{false};
1354 3 : if (m_context.pendingSyncOff.has_value())
1355 : {
1356 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1357 3 : if (decoder)
1358 : {
1359 2 : bool syncOff{m_context.pendingSyncOff.value()};
1360 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1361 :
1362 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1363 : {
1364 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1365 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1366 1 : result = true;
1367 : }
1368 : else
1369 : {
1370 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1371 : }
1372 2 : m_context.pendingSyncOff.reset();
1373 2 : m_gstWrapper->gstObjectUnref(decoder);
1374 : }
1375 : else
1376 : {
1377 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1378 : }
1379 : }
1380 3 : return result;
1381 : }
1382 :
1383 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1384 : {
1385 6 : bool result{false};
1386 6 : int32_t streamSyncMode{0};
1387 : {
1388 6 : std::unique_lock lock{m_context.propertyMutex};
1389 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1390 : {
1391 0 : return false;
1392 : }
1393 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1394 : }
1395 6 : if (MediaSourceType::AUDIO == type)
1396 : {
1397 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1398 3 : if (!decoder)
1399 : {
1400 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1401 1 : return false;
1402 : }
1403 :
1404 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1405 :
1406 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1407 : {
1408 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1409 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1410 1 : result = true;
1411 : }
1412 : else
1413 : {
1414 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1415 : }
1416 2 : m_gstWrapper->gstObjectUnref(decoder);
1417 2 : std::unique_lock lock{m_context.propertyMutex};
1418 2 : m_context.pendingStreamSyncMode.erase(type);
1419 : }
1420 3 : else if (MediaSourceType::VIDEO == type)
1421 : {
1422 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1423 3 : if (!parser)
1424 : {
1425 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1426 1 : return false;
1427 : }
1428 :
1429 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1430 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1431 :
1432 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1433 : {
1434 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1435 1 : result = true;
1436 : }
1437 : else
1438 : {
1439 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1440 : }
1441 2 : m_gstWrapper->gstObjectUnref(parser);
1442 2 : std::unique_lock lock{m_context.propertyMutex};
1443 2 : m_context.pendingStreamSyncMode.erase(type);
1444 : }
1445 4 : return result;
1446 : }
1447 :
1448 3 : bool GstGenericPlayer::setRenderFrame()
1449 : {
1450 3 : bool result{false};
1451 3 : if (m_context.pendingRenderFrame)
1452 : {
1453 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1454 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1455 3 : if (sink)
1456 : {
1457 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1458 : {
1459 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1460 :
1461 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1462 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1463 : false));
1464 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1465 1 : result = true;
1466 : }
1467 : else
1468 : {
1469 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1470 : }
1471 2 : m_gstWrapper->gstObjectUnref(sink);
1472 2 : m_context.pendingRenderFrame = false;
1473 : }
1474 : else
1475 : {
1476 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1477 : }
1478 : }
1479 3 : return result;
1480 : }
1481 :
1482 3 : bool GstGenericPlayer::setBufferingLimit()
1483 : {
1484 3 : bool result{false};
1485 3 : guint bufferingLimit{0};
1486 : {
1487 3 : std::unique_lock lock{m_context.propertyMutex};
1488 3 : if (!m_context.pendingBufferingLimit.has_value())
1489 : {
1490 0 : return false;
1491 : }
1492 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1493 : }
1494 :
1495 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1496 3 : if (decoder)
1497 : {
1498 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1499 :
1500 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1501 : {
1502 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1503 1 : result = true;
1504 : }
1505 : else
1506 : {
1507 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1508 : GST_ELEMENT_NAME(decoder));
1509 : }
1510 2 : m_gstWrapper->gstObjectUnref(decoder);
1511 2 : std::unique_lock lock{m_context.propertyMutex};
1512 2 : m_context.pendingBufferingLimit.reset();
1513 : }
1514 : else
1515 : {
1516 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1517 : }
1518 3 : return result;
1519 : }
1520 :
1521 2 : bool GstGenericPlayer::setUseBuffering()
1522 : {
1523 2 : std::unique_lock lock{m_context.propertyMutex};
1524 2 : if (m_context.pendingUseBuffering.has_value())
1525 : {
1526 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1527 : {
1528 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1529 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1530 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1531 : useBufferingGboolean, nullptr);
1532 1 : m_context.pendingUseBuffering.reset();
1533 1 : return true;
1534 : }
1535 : else
1536 : {
1537 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1538 : }
1539 : }
1540 1 : return false;
1541 2 : }
1542 :
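 : // Creates a westerossink with "res-usage" cleared and installs it as the
 : // pipeline's "video-sink", marking this player's video as secondary.
 : // Returning true when no westerossink factory exists is intentional: such
 : // platforms simply skip this step.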
1543 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1544 : {
1545 8 : bool result = false;
1546 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1547 8 : if (factory)
1548 : {
1549 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1550 7 : if (videoSink)
1551 : {
1552 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1553 : {
1554 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1555 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1556 4 : result = true;
1557 : }
1558 : else
1559 : {
1560 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1561 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1562 : }
1563 : }
1564 : else
1565 : {
1566 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1567 : }
1568 :
1569 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1570 : }
1571 : else
1572 : {
1573 : // No westeros sink
1574 1 : result = true;
1575 : }
1576 :
1577 8 : return result;
1578 : }
1579 :
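 : // Creates an "erm" GstContext, sets its "res-usage" field to 0 and attaches
 : // the context to the pipeline so that ERM-aware elements can read it.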
1580 8 : bool GstGenericPlayer::setErmContext()
1581 : {
1582 8 : bool result = false;
1583 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1584 8 : if (context)
1585 : {
1586 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1587 6 : if (contextStructure)
1588 : {
1589 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1590 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1591 5 : result = true;
1592 : }
1593 : else
1594 : {
1595 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1596 : }
1597 6 : m_gstWrapper->gstContextUnref(context);
1598 : }
1599 : else
1600 : {
1601 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1602 : }
1603 :
1604 8 : return result;
1605 : }
1606 :
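 : // Starts, unless already running, the periodic timer that enqueues
 : // ReportPosition and CheckAudioUnderflow tasks on the worker thread every
 : // kPositionReportTimerMs.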
1607 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1608 : {
1609 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1610 : {
1611 1 : return;
1612 : }
1613 :
1614 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1615 : kPositionReportTimerMs,
1616 10 : [this]()
1617 : {
1618 1 : if (m_workerThread)
1619 : {
1620 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context));
1621 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1622 : }
1623 1 : },
1624 5 : firebolt::rialto::common::TimerType::PERIODIC);
1625 : }
1626 :
1627 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1628 : {
1629 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1630 : {
1631 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1632 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1633 : }
1634 4 : }
1635 :
1636 0 : void GstGenericPlayer::setPendingPlaybackRate()
1637 : {
1638 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1639 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1640 : }
1641 :
1642 1 : void GstGenericPlayer::renderFrame()
1643 : {
1644 1 : if (m_workerThread)
1645 : {
1646 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1647 : }
1648 : }
1649 :
1650 16 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1651 : {
1652 16 : if (m_workerThread)
1653 : {
1654 32 : m_workerThread->enqueueTask(
1655 32 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1656 : }
1657 16 : }
1658 :
1659 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1660 : {
1661 : // We are on the main thread here, but m_context.pipeline can safely be used because it is modified only in the
1662 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1663 3 : if (!m_context.pipeline)
1664 : {
1665 0 : return false;
1666 : }
1667 :
1668 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1669 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1670 : // The code has been written to be backward compatible with platforms that don't have this property.
1671 : // The observed behaviour was:
1672 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1673 : // to find volume in the pipeline works and is used.
1674 : // - if the returned fade volume is positive then audio-fade is active. In this case the fade volume
1675 : // directly gives the current volume level (0 = min to 100 = max) and the pipeline's current volume
1676 : // level is meaningless.
1677 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1678 5 : if (m_context.audioFadeEnabled && sink &&
1679 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1680 : {
1681 2 : gint fadeVolume{-100};
1682 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1683 2 : if (fadeVolume < 0)
1684 : {
1685 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1686 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1687 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1688 : }
1689 : else
1690 : {
1691 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1692 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1693 : }
1694 : }
1695 : else
1696 : {
1697 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1698 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1699 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1700 : }
1701 :
1702 3 : if (sink)
1703 2 : m_gstWrapper->gstObjectUnref(sink);
1704 :
1705 3 : return true;
1706 : }
1707 :
1708 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1709 : {
1710 1 : if (m_workerThread)
1711 : {
1712 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1713 : }
1714 : }
1715 :
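 : // Reads the mute state on the calling thread: the subtitle sink's "mute"
 : // property for SUBTITLE, the pipeline's stream-volume mute for AUDIO; other
 : // source types are not supported.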
1716 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1717 : {
1718 : // We are on the main thread here, but m_context.pipeline can safely be used because it is modified only in the
1719 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1720 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1721 : {
1722 2 : if (!m_context.subtitleSink)
1723 : {
1724 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1725 1 : return false;
1726 : }
1727 1 : gboolean muteValue{FALSE};
1728 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1729 1 : mute = muteValue;
1730 : }
1731 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1732 : {
1733 2 : if (!m_context.pipeline)
1734 : {
1735 1 : return false;
1736 : }
1737 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1738 : }
1739 : else
1740 : {
1741 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1742 1 : return false;
1743 : }
1744 :
1745 2 : return true;
1746 : }
1747 :
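 : // Returns the "async" property of the sink for the given source type; when
 : // no sink is attached yet, async is assumed (our sinks are async by default).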
1748 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1749 : {
1750 1 : GstElement *sink = getSink(mediaSourceType);
1751 1 : if (!sink)
1752 : {
1753 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1754 0 : return true; // Our sinks are async by default
1755 : }
1756 1 : gboolean returnValue{TRUE};
1757 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1758 1 : m_gstWrapper->gstObjectUnref(sink);
1759 1 : return returnValue == TRUE;
1760 : }
1761 :
1762 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1763 : {
1764 1 : if (m_workerThread)
1765 : {
1766 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1767 : }
1768 : }
1769 :
1770 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1771 : {
1772 3 : if (!m_context.subtitleSink)
1773 : {
1774 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1775 1 : return false;
1776 : }
1777 :
1778 2 : gchar *identifier = nullptr;
1779 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1780 :
1781 2 : if (identifier)
1782 : {
1783 1 : textTrackIdentifier = identifier;
1784 1 : m_glibWrapper->gFree(identifier);
1785 1 : return true;
1786 : }
1787 : else
1788 : {
1789 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1790 1 : return false;
1791 : }
1792 : }
1793 :
1794 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1795 : {
1796 1 : if (m_workerThread)
1797 : {
1798 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1799 : }
1800 1 : return true;
1801 : }
1802 :
1803 1 : bool GstGenericPlayer::setSync(bool sync)
1804 : {
1805 1 : if (m_workerThread)
1806 : {
1807 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1808 : }
1809 1 : return true;
1810 : }
1811 :
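 : // Returns the audio sink's "sync" property when a sink exposing it is
 : // attached, otherwise falls back to any value queued by setSync(). With
 : // neither available the default is unknown, so failure is reported.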
1812 4 : bool GstGenericPlayer::getSync(bool &sync)
1813 : {
1814 4 : bool returnValue{false};
1815 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1816 4 : if (sink)
1817 : {
1818 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1819 : {
1820 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1821 1 : returnValue = true;
1822 : }
1823 : else
1824 : {
1825 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1826 : }
1827 2 : m_gstWrapper->gstObjectUnref(sink);
1828 : }
1829 2 : else if (m_context.pendingSync.has_value())
1830 : {
1831 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1832 1 : sync = m_context.pendingSync.value();
1833 1 : returnValue = true;
1834 : }
1835 : else
1836 : {
1837 : // We don't know the sink's default "sync" setting, so report failure here
1838 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached and no queued sync value");
1839 : }
1840 :
1841 4 : return returnValue;
1842 : }
1843 :
1844 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1845 : {
1846 1 : if (m_workerThread)
1847 : {
1848 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1849 : }
1850 1 : return true;
1851 : }
1852 :
1853 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1854 : {
1855 1 : if (m_workerThread)
1856 : {
1857 2 : m_workerThread->enqueueTask(
1858 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1859 : }
1860 1 : return true;
1861 : }
1862 :
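 : // Returns the audio decoder's "stream-sync-mode" property when available,
 : // otherwise any value still queued in m_context.pendingStreamSyncMode.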
1863 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1864 : {
1865 5 : bool returnValue{false};
1866 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1867 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1868 : {
1869 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1870 2 : returnValue = true;
1871 : }
1872 : else
1873 : {
1874 3 : std::unique_lock lock{m_context.propertyMutex};
1875 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1876 : {
1877 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1878 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1879 1 : returnValue = true;
1880 : }
1881 : else
1882 : {
1883 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1884 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1885 : }
1886 3 : }
1887 :
1888 5 : if (decoder)
1889 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1890 :
1891 5 : return returnValue;
1892 : }
1893 :
1894 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1895 : {
1896 1 : if (m_workerThread)
1897 : {
1898 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1899 : }
1900 : }
1901 :
1902 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
1903 : {
1904 1 : if (m_workerThread)
1905 : {
1906 1 : async = isAsync(mediaSourceType);
1907 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
1908 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
1909 : }
1910 : }
1911 :
1912 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
1913 : double appliedRate, uint64_t stopPosition)
1914 : {
1915 1 : if (m_workerThread)
1916 : {
1917 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
1918 : resetTime, appliedRate, stopPosition));
1919 : }
1920 : }
1921 :
1922 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
1923 : {
1924 1 : if (m_workerThread)
1925 : {
1926 2 : m_workerThread->enqueueTask(
1927 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
1928 : }
1929 1 : }
1930 :
1931 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
1932 : {
1933 1 : if (m_workerThread)
1934 : {
1935 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
1936 : }
1937 : }
1938 :
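 : // Returns the audio decoder's "limit-buffering-ms" property when available,
 : // otherwise any value still queued in m_context.pendingBufferingLimit.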
1939 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
1940 : {
1941 5 : bool returnValue{false};
1942 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1943 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1944 : {
1945 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
1946 2 : returnValue = true;
1947 : }
1948 : else
1949 : {
1950 3 : std::unique_lock lock{m_context.propertyMutex};
1951 3 : if (m_context.pendingBufferingLimit.has_value())
1952 : {
1953 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1954 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
1955 1 : returnValue = true;
1956 : }
1957 : else
1958 : {
1959 2 : RIALTO_SERVER_LOG_ERROR("Buffering limit not supported in decoder '%s'",
1960 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1961 : }
1962 3 : }
1963 :
1964 5 : if (decoder)
1965 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1966 :
1967 5 : return returnValue;
1968 : }
1969 :
1970 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
1971 : {
1972 1 : if (m_workerThread)
1973 : {
1974 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
1975 : }
1976 : }
1977 :
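 : // Returns the audio decodebin's "use-buffering" property when the playback
 : // group exists, otherwise any value still queued in m_context.pendingUseBuffering.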
1978 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
1979 : {
1980 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1981 : {
1982 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
1983 1 : return true;
1984 : }
1985 : else
1986 : {
1987 2 : std::unique_lock lock{m_context.propertyMutex};
1988 2 : if (m_context.pendingUseBuffering.has_value())
1989 : {
1990 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1991 1 : useBuffering = m_context.pendingUseBuffering.value();
1992 1 : return true;
1993 : }
1994 2 : }
1995 1 : return false;
1996 : }
1997 :
1998 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
1999 : {
2000 1 : if (m_workerThread)
2001 : {
2002 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2003 : }
2004 : }
2005 :
2006 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2007 : {
2008 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2009 : }
2010 :
2011 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2012 : {
2013 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2014 : }
2015 :
2016 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2017 : {
2018 : // Only add children that are sinks
2019 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2020 : {
2021 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2022 :
2023 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2024 : {
2025 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2026 : }
2027 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2028 : }
2029 3 : }
2030 :
2031 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2032 : {
2033 : // Only add children that are sinks
2034 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2035 : {
2036 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2037 :
2038 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2039 : {
2040 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2041 : }
2042 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2043 : }
2044 3 : }
2045 :
2046 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2047 : {
2048 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2049 : {
2050 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2051 :
2052 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2053 : {
2054 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2055 1 : return;
2056 : }
2057 :
2058 2 : m_context.autoVideoChildSink = nullptr;
2059 : }
2060 : }
2061 :
2062 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2063 : {
2064 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2065 : {
2066 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2067 :
2068 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2069 : {
2070 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2071 1 : return;
2072 : }
2073 :
2074 2 : m_context.autoAudioChildSink = nullptr;
2075 : }
2076 : }
2077 :
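 : // autovideosink and autoaudiosink create a concrete child sink at runtime;
 : // the two helpers below return that child (tracked by the addAuto*SinkChild
 : // callbacks above) so that callers operate on the real sink element.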
2078 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2079 : {
2080 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2081 14 : if (!kTmpName)
2082 0 : return sink;
2083 :
2084 28 : const std::string kElementTypeName{kTmpName};
2085 14 : if (kElementTypeName == "GstAutoVideoSink")
2086 : {
2087 1 : if (!m_context.autoVideoChildSink)
2088 : {
2089 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2090 : }
2091 : else
2092 : {
2093 1 : return m_context.autoVideoChildSink;
2094 : }
2095 : }
2096 13 : return sink;
2097 14 : }
2098 :
2099 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2100 : {
2101 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2102 11 : if (!kTmpName)
2103 0 : return sink;
2104 :
2105 22 : const std::string kElementTypeName{kTmpName};
2106 11 : if (kElementTypeName == "GstAutoAudioSink")
2107 : {
2108 1 : if (!m_context.autoAudioChildSink)
2109 : {
2110 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2111 : }
2112 : else
2113 : {
2114 1 : return m_context.autoAudioChildSink;
2115 : }
2116 : }
2117 10 : return sink;
2118 11 : }
2119 :
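 : // Builds the playbin "flags" bitmask: video, native-video and text are always
 : // enabled; audio is added when enableAudio is true, plus native-audio when a
 : // brcmaudiosink factory is present on the platform.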
2120 205 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2121 : {
2122 205 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2123 :
2124 205 : if (enableAudio)
2125 : {
2126 205 : flags |= getGstPlayFlag("audio");
2127 205 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2128 : }
2129 :
2130 205 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2131 : }
2132 :
2133 205 : bool GstGenericPlayer::shouldEnableNativeAudio()
2134 : {
2135 205 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2136 205 : if (factory)
2137 : {
2138 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2139 1 : return true;
2140 : }
2141 204 : return false;
2142 : }
2143 :
2144 : }; // namespace firebolt::rialto::server