Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Report position interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
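// A minimal sketch of how this interval is consumed, paraphrased from
// startPositionReportingAndCheckAudioUnderflowTimer() further down in this file (not verbatim):
//
//   m_positionReportingAndCheckAudioUnderflowTimer =
//       m_timerFactory->createTimer(kPositionReportTimerMs,
//                                   [this]()
//                                   {
//                                       // enqueue ReportPosition and CheckAudioUnderflow tasks
//                                   },
//                                   firebolt::rialto::common::TimerType::PERIODIC);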
45 :
46 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
47 : {
48 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
49 2 : (lhs.stopPosition == rhs.stopPosition);
50 : }
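// This comparison supports pushAdditionalSegmentIfRequired() further down in this file, which checks
// whether the newly requested initial segment matches the segment already applied on the source,
// roughly (paraphrased, not verbatim):
//
//   if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
//       currentPosition->second == initialPosition->second.back())
//   {
//       // push an additional copy of the segment with resetTime = false
//   }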
51 : } // namespace
52 :
53 : namespace firebolt::rialto::server
54 : {
55 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
56 :
57 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
58 : {
59 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
60 :
61 3 : if (!factory)
62 : {
63 : try
64 : {
65 3 : factory = std::make_shared<GstGenericPlayerFactory>();
66 : }
67 0 : catch (const std::exception &e)
68 : {
69 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
70 : }
71 :
72 3 : GstGenericPlayerFactory::m_factory = factory;
73 : }
74 :
75 3 : return factory;
76 : }
77 :
78 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
79 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
80 : const VideoRequirements &videoRequirements,
81 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
82 : {
83 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
84 :
85 : try
86 : {
87 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
88 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
89 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
92 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
93 : {
94 0 : throw std::runtime_error("Cannot create GstWrapper");
95 : }
96 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
97 : {
98 0 : throw std::runtime_error("Cannot create GlibWrapper");
99 : }
100 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
101 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
102 : {
103 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
104 : }
105 : gstPlayer = std::make_unique<
106 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
107 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
108 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
109 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
110 : rdkGstreamerUtilsWrapper,
111 2 : IGstTextTrackSinkFactory::createFactory()),
112 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
113 3 : IGstProtectionMetadataHelperFactory::createFactory());
114 1 : }
115 0 : catch (const std::exception &e)
116 : {
117 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
118 : }
119 :
120 1 : return gstPlayer;
121 : }
122 :
123 210 : GstGenericPlayer::GstGenericPlayer(
124 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
125 : const VideoRequirements &videoRequirements,
126 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
129 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
130 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
131 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
132 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
133 210 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
134 210 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
135 420 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
136 630 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
137 : {
138 210 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
139 :
140 210 : gstInitialiser.waitForInitialisation();
141 :
142 210 : m_context.decryptionService = &decryptionService;
143 :
144 210 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
145 : {
146 2 : throw std::runtime_error("Cannot create GstSrc");
147 : }
148 :
149 208 : if (!timerFactory)
150 : {
151 1 : throw std::runtime_error("TimerFactory is invalid");
152 : }
153 :
154 414 : if ((!gstProtectionMetadataFactory) ||
155 414 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
156 : {
157 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
158 : }
159 :
160 : // Ensure that rialtosrc has been initialised
161 207 : m_context.gstSrc->initSrc();
162 :
163 : // Start task thread
164 207 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
165 : {
166 0 : throw std::runtime_error("Failed to create the worker thread");
167 : }
168 :
169 : // Initialise pipeline
170 207 : switch (type)
171 : {
172 206 : case MediaType::MSE:
173 : {
174 206 : initMsePipeline();
175 206 : break;
176 : }
177 1 : default:
178 : {
179 1 : resetWorkerThread();
180 1 : throw std::runtime_error("Media type not supported");
181 : }
182 : }
183 :
184 : // Check the video requirements for a limited video.
185 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
186 : // video in a dual video scenario.
187 206 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
188 : {
189 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
190 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
191 8 : bool ermContextResult = setErmContext();
192 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
193 : {
194 1 : resetWorkerThread();
195 1 : termPipeline();
196 1 : throw std::runtime_error("Could not set secondary video");
197 : }
198 7 : }
199 : else
200 : {
201 198 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
202 : }
203 :
204 : m_gstDispatcherThread =
205 205 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
206 280 : }
207 :
208 410 : GstGenericPlayer::~GstGenericPlayer()
209 : {
210 205 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
211 :
212 205 : m_gstDispatcherThread.reset();
213 :
214 205 : resetWorkerThread();
215 :
216 205 : termPipeline();
217 410 : }
218 :
219 206 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 206 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 206 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 206 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 206 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 206 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 206 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 206 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 206 : if (playsink)
238 : {
239 205 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 205 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 206 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
247 : }
248 :
249 207 : void GstGenericPlayer::resetWorkerThread()
250 : {
251 : // Shutdown task thread
252 207 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
253 207 : m_workerThread->join();
254 207 : m_workerThread.reset();
255 : }
256 :
257 206 : void GstGenericPlayer::termPipeline()
258 : {
259 206 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
260 : {
261 0 : m_finishSourceSetupTimer->cancel();
262 : }
263 :
264 206 : m_finishSourceSetupTimer.reset();
265 :
266 255 : for (auto &elem : m_context.streamInfo)
267 : {
268 49 : StreamInfo &streamInfo = elem.second;
269 51 : for (auto &buffer : streamInfo.buffers)
270 : {
271 2 : m_gstWrapper->gstBufferUnref(buffer);
272 : }
273 :
274 49 : streamInfo.buffers.clear();
275 : }
276 :
277 206 : m_taskFactory->createStop(m_context, *this)->execute();
278 206 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
279 206 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
280 206 : m_gstWrapper->gstObjectUnref(bus);
281 :
282 206 : if (m_context.source)
283 : {
284 1 : m_gstWrapper->gstObjectUnref(m_context.source);
285 : }
286 206 : if (m_context.subtitleSink)
287 : {
288 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
289 : }
290 :
291 : // Delete the pipeline
292 206 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
293 :
294 206 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
295 : }
296 :
297 825 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
298 : {
299 : GFlagsClass *flagsClass =
300 825 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
301 825 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
302 825 : return flag ? flag->value : 0;
303 : }
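// A minimal usage sketch (an assumption about setPlaybinFlags(), which is not part of this listing):
// values returned by getGstPlayFlag() are OR-ed together and written to playbin's "flags" property,
// for example:
//
//   unsigned flags = getGstPlayFlag("audio") | getGstPlayFlag("video") | getGstPlayFlag("text");
//   m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
//
// The exact flag combination used by this player is applied in setPlaybinFlags().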
304 :
305 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
306 : {
307 1 : self->m_gstWrapper->gstObjectRef(source);
308 1 : if (self->m_workerThread)
309 : {
310 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
311 : }
312 : }
313 :
314 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
315 : {
316 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
317 1 : self->m_gstWrapper->gstObjectRef(element);
318 1 : if (self->m_workerThread)
319 : {
320 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
321 : }
322 : }
323 :
324 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
325 : {
326 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
327 1 : if (self->m_workerThread)
328 : {
329 2 : self->m_workerThread->enqueueTask(
330 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
331 : }
332 1 : }
333 :
334 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
335 : {
336 1 : if (m_workerThread)
337 : {
338 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
339 : }
340 : }
341 :
342 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
343 : {
344 1 : if (m_workerThread)
345 : {
346 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
347 : }
348 : }
349 :
350 2 : void GstGenericPlayer::allSourcesAttached()
351 : {
352 2 : if (m_workerThread)
353 : {
354 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
355 : }
356 : }
357 :
358 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
359 : {
360 1 : if (m_workerThread)
361 : {
362 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
363 : }
364 : }
365 :
366 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
367 : {
368 1 : if (m_workerThread)
369 : {
370 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
371 : }
372 : }
373 :
374 1 : void GstGenericPlayer::setPosition(std::int64_t position)
375 : {
376 1 : if (m_workerThread)
377 : {
378 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
379 : }
380 : }
381 :
382 1 : void GstGenericPlayer::setPlaybackRate(double rate)
383 : {
384 1 : if (m_workerThread)
385 : {
386 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
387 : }
388 : }
389 :
390 4 : bool GstGenericPlayer::getPosition(std::int64_t &position)
391 : {
392 : // We are on the main thread here, but m_context.pipeline can be used because it is modified only in the
393 : // GstGenericPlayer constructor and destructor. GstGenericPlayer is created/destructed on the main thread, so we won't crash here.
394 4 : if (!m_context.pipeline || GST_STATE(m_context.pipeline) < GST_STATE_PAUSED)
395 : {
396 1 : RIALTO_SERVER_LOG_WARN("GetPosition failed. Pipeline is null or state < PAUSED");
397 1 : return false;
398 : }
399 3 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
400 : {
401 1 : RIALTO_SERVER_LOG_WARN("Query position failed");
402 1 : return false;
403 : }
404 2 : return true;
405 : }
406 :
407 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
408 : {
409 38 : const char *kSinkName{nullptr};
410 38 : GstElement *sink{nullptr};
411 38 : switch (mediaSourceType)
412 : {
413 18 : case MediaSourceType::AUDIO:
414 18 : kSinkName = "audio-sink";
415 18 : break;
416 18 : case MediaSourceType::VIDEO:
417 18 : kSinkName = "video-sink";
418 18 : break;
419 2 : default:
420 2 : break;
421 : }
422 38 : if (!kSinkName)
423 : {
424 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
425 : }
426 : else
427 : {
428 36 : if (m_context.pipeline == nullptr)
429 : {
430 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
431 : }
432 : else
433 : {
434 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
435 : }
436 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
437 36 : if (sink)
438 : {
439 25 : GstElement *autoSink{sink};
440 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
441 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
442 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
443 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
444 :
445 : // Is this an auto-sink?...
446 25 : if (autoSink != sink)
447 : {
448 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
449 :
450 : // increase the reference count of the auto sink
451 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
452 : }
453 : }
454 : else
455 : {
456 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
457 : }
458 : }
459 38 : return sink;
460 : }
461 :
462 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
463 : {
464 1 : m_flushWatcher->setFlushed(mediaSourceType);
465 : }
466 :
467 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
468 : {
469 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
470 19 : GValue item = G_VALUE_INIT;
471 19 : gboolean done = FALSE;
472 :
473 28 : while (!done)
474 : {
475 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
476 : {
477 12 : case GST_ITERATOR_OK:
478 : {
479 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
480 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
481 :
482 12 : if (factory)
483 : {
484 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
485 12 : if (mediaSourceType == MediaSourceType::AUDIO)
486 : {
487 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
488 : }
489 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
490 : {
491 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
492 : }
493 :
494 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
495 : {
496 12 : m_glibWrapper->gValueUnset(&item);
497 12 : m_gstWrapper->gstIteratorFree(it);
498 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
499 : }
500 : }
501 :
502 0 : m_glibWrapper->gValueUnset(&item);
503 0 : break;
504 : }
505 2 : case GST_ITERATOR_RESYNC:
506 2 : m_gstWrapper->gstIteratorResync(it);
507 2 : break;
508 7 : case GST_ITERATOR_ERROR:
509 : case GST_ITERATOR_DONE:
510 7 : done = TRUE;
511 7 : break;
512 : }
513 : }
514 :
515 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
516 :
517 7 : m_glibWrapper->gValueUnset(&item);
518 7 : m_gstWrapper->gstIteratorFree(it);
519 :
520 7 : return nullptr;
521 : }
522 :
523 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
524 : {
525 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
526 3 : GValue item = G_VALUE_INIT;
527 3 : gboolean done = FALSE;
528 :
529 4 : while (!done)
530 : {
531 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
532 : {
533 2 : case GST_ITERATOR_OK:
534 : {
535 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
536 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
537 :
538 2 : if (factory)
539 : {
540 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
541 2 : if (mediaSourceType == MediaSourceType::AUDIO)
542 : {
543 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
544 : }
545 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
546 : {
547 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
548 : }
549 :
550 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
551 : {
552 2 : m_glibWrapper->gValueUnset(&item);
553 2 : m_gstWrapper->gstIteratorFree(it);
554 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
555 : }
556 : }
557 :
558 0 : m_glibWrapper->gValueUnset(&item);
559 0 : break;
560 : }
561 0 : case GST_ITERATOR_RESYNC:
562 0 : m_gstWrapper->gstIteratorResync(it);
563 0 : break;
564 1 : case GST_ITERATOR_ERROR:
565 : case GST_ITERATOR_DONE:
566 1 : done = TRUE;
567 1 : break;
568 : }
569 : }
570 :
571 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
572 :
573 1 : m_glibWrapper->gValueUnset(&item);
574 1 : m_gstWrapper->gstIteratorFree(it);
575 :
576 1 : return nullptr;
577 : }
578 :
579 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
580 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
581 : {
582 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
583 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
584 5 : if (kSource)
585 : {
586 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
587 : audioAttributes =
588 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
589 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
590 : 0, // used only in one of logs in rdk_gstreamer_utils, no
591 : // need to set this param.
592 : 0, // used only in one of logs in rdk_gstreamer_utils, no
593 : // need to set this param.
594 4 : audioConfig.codecSpecificConfig.data(),
595 : static_cast<std::uint32_t>(
596 4 : audioConfig.codecSpecificConfig.size())};
597 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
598 : {
599 2 : audioAttributes->m_codecParam = "mp4a";
600 : }
601 2 : else if (source->getMimeType() == "audio/x-eac3")
602 : {
603 1 : audioAttributes->m_codecParam = "ec-3";
604 : }
605 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
606 : {
607 1 : audioAttributes->m_codecParam = "lpcm";
608 : }
609 4 : }
610 : else
611 : {
612 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
613 : }
614 :
615 5 : return audioAttributes;
616 : }
617 :
618 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
619 : {
620 1 : if (!m_workerThread)
621 0 : return false;
622 :
623 2 : m_workerThread->enqueueTask(
624 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
625 1 : return true;
626 : }
627 :
628 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
629 : {
630 5 : bool returnValue{false};
631 5 : GstElement *sink{getSink(mediaSourceType)};
632 5 : if (sink)
633 : {
634 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
635 : {
636 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
637 2 : returnValue = true;
638 : }
639 : else
640 : {
641 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
642 : }
643 3 : m_gstWrapper->gstObjectUnref(sink);
644 : }
645 : else
646 : {
647 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
648 : }
649 :
650 5 : return returnValue;
651 : }
652 :
653 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
654 : {
655 5 : bool returnValue{false};
656 5 : GstElement *sink{getSink(mediaSourceType)};
657 5 : if (sink)
658 : {
659 3 : GstStructure *stats{nullptr};
660 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
661 3 : if (!stats)
662 : {
663 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
664 : }
665 : else
666 : {
667 : guint64 renderedFramesTmp;
668 : guint64 droppedFramesTmp;
669 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
670 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
671 : {
672 1 : renderedFrames = renderedFramesTmp;
673 1 : droppedFrames = droppedFramesTmp;
674 1 : returnValue = true;
675 : }
676 : else
677 : {
678 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
679 : GST_ELEMENT_NAME(sink));
680 : }
681 2 : m_gstWrapper->gstStructureFree(stats);
682 : }
683 3 : m_gstWrapper->gstObjectUnref(sink);
684 : }
685 :
686 5 : return returnValue;
687 : }
688 :
689 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
690 : {
691 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
692 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
693 :
694 4 : if (mediaSegment.isEncrypted())
695 : {
696 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
697 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
698 :
699 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
700 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
701 3 : mediaSegment.getInitVector().size());
702 3 : GstBuffer *subsamples{nullptr};
703 3 : if (!mediaSegment.getSubSamples().empty())
704 : {
705 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
706 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
707 : GstByteWriter writer;
708 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
709 :
710 6 : for (const auto &subSample : mediaSegment.getSubSamples())
711 : {
712 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
713 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
714 : }
715 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
716 : }
717 :
718 3 : uint32_t crypt = 0;
719 3 : uint32_t skip = 0;
720 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
721 :
722 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
723 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
724 3 : mediaSegment.getInitWithLast15(),
725 : keyId,
726 : initVector,
727 : subsamples,
728 6 : mediaSegment.getCipherMode(),
729 : crypt,
730 : skip,
731 : encryptionPatternSet,
732 6 : m_context.decryptionService};
733 :
734 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
735 : {
736 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
737 1 : if (keyId)
738 : {
739 1 : m_gstWrapper->gstBufferUnref(keyId);
740 : }
741 1 : if (initVector)
742 : {
743 1 : m_gstWrapper->gstBufferUnref(initVector);
744 : }
745 1 : if (subsamples)
746 : {
747 1 : m_gstWrapper->gstBufferUnref(subsamples);
748 : }
749 : }
750 : }
751 :
752 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
753 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
754 4 : return gstBuffer;
755 : }
756 :
757 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
758 : {
759 4 : auto elem = m_context.streamInfo.find(mediaSource);
760 4 : if (elem != m_context.streamInfo.end())
761 : {
762 2 : StreamInfo &streamInfo = elem->second;
763 2 : streamInfo.isNeedDataPending = false;
764 :
765 : // Send new NeedMediaData if we still need it
766 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
767 : {
768 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
769 : }
770 : }
771 : else
772 : {
773 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
774 : }
775 4 : }
776 :
777 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
778 : {
779 19 : auto elem = m_context.streamInfo.find(mediaType);
780 19 : if (elem != m_context.streamInfo.end())
781 : {
782 16 : StreamInfo &streamInfo = elem->second;
783 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
784 : {
785 2 : return;
786 : }
787 :
788 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
789 : {
790 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
791 : }
792 : else
793 : {
794 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
795 : }
796 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
797 : {
798 : // This needs to be done before gstAppSrcPushBuffer() is
799 : // called because it can free the memory
800 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
801 : }
802 :
803 28 : for (GstBuffer *buffer : streamInfo.buffers)
804 : {
805 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
806 : }
807 14 : streamInfo.buffers.clear();
808 14 : streamInfo.isDataPushed = true;
809 :
810 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
811 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
812 15 : [](const auto &entry) { return entry.second.isDataPushed; });
813 :
814 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
815 : {
816 1 : m_context.bufferedNotificationSent = true;
817 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
818 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
819 : }
820 14 : cancelUnderflow(mediaType);
821 :
822 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
823 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
824 : {
825 0 : setEos(mediaType);
826 : }
827 : }
828 : }
829 :
830 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
831 : {
832 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
833 7 : if (elem != m_context.streamInfo.end())
834 : {
835 6 : StreamInfo &streamInfo = elem->second;
836 :
837 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
838 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
839 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
840 :
841 6 : if (rate != kInvalidRate)
842 : {
843 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
844 : }
845 :
846 6 : if (channels != kInvalidChannels)
847 : {
848 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
849 : }
850 :
851 6 : setCodecData(newCaps, codecData);
852 :
853 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
854 : {
855 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
856 : }
857 :
858 6 : m_gstWrapper->gstCapsUnref(newCaps);
859 6 : m_gstWrapper->gstCapsUnref(currentCaps);
860 : }
861 7 : }
862 :
863 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
864 : const std::shared_ptr<CodecData> &codecData)
865 : {
866 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
867 8 : if (elem != m_context.streamInfo.end())
868 : {
869 7 : StreamInfo &streamInfo = elem->second;
870 :
871 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
872 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
873 :
874 7 : if (width > 0)
875 : {
876 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
877 : }
878 :
879 7 : if (height > 0)
880 : {
881 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
882 : }
883 :
884 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
885 : {
886 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
887 : frameRate.denominator, NULL);
888 : }
889 :
890 7 : setCodecData(newCaps, codecData);
891 :
892 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
893 : {
894 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
895 : }
896 :
897 7 : m_gstWrapper->gstCapsUnref(currentCaps);
898 7 : m_gstWrapper->gstCapsUnref(newCaps);
899 : }
900 8 : }
901 :
902 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
903 : {
904 5 : if (clippingStart || clippingEnd)
905 : {
906 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
907 : {
908 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
909 : clippingStart, clippingEnd);
910 : }
911 : else
912 : {
913 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
914 : buffer, clippingStart, clippingEnd);
915 : }
916 : }
917 5 : }
918 :
919 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
920 : {
921 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
922 : {
923 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
924 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
925 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
926 7 : m_gstWrapper->gstBufferUnref(buf);
927 7 : return true;
928 : }
929 6 : if (codecData && CodecDataType::STRING == codecData->type)
930 : {
931 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
932 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
933 2 : return true;
934 : }
935 4 : return false;
936 : }
937 :
938 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
939 : {
940 12 : auto initialPosition = m_context.initialPositions.find(source);
941 12 : if (m_context.initialPositions.end() == initialPosition)
942 : {
943 : // Sending initial sample not needed
944 7 : return;
945 : }
946 : // GstAppSrc does not replace the segment if it is the same as the previous one.
947 : // This causes problems with position reporting on amlogic devices, so we need to push
948 : // two segments with different reset_time values.
949 5 : pushAdditionalSegmentIfRequired(source);
950 :
951 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
952 : {
953 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
954 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
955 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
956 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
957 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
958 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
959 : {
960 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
961 1 : m_gstWrapper->gstSegmentFree(segment);
962 1 : m_context.initialPositions.erase(initialPosition);
963 1 : return;
964 : }
965 5 : segment->applied_rate = appliedRate;
966 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
967 : "], rate: %f, appliedRate %f, reset_time: %d\n",
968 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
969 : segment->rate, segment->applied_rate, resetTime);
970 :
971 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
972 : // We can't pass buffer in GstSample, because implementation of gst_app_src_push_sample
973 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (that causes problems with EME
974 : // for first frame).
975 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
976 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
977 5 : m_gstWrapper->gstSampleUnref(sample);
978 5 : m_gstWrapper->gstCapsUnref(currentCaps);
979 :
980 5 : m_gstWrapper->gstSegmentFree(segment);
981 : }
982 4 : m_context.currentPosition[source] = initialPosition->second.back();
983 4 : m_context.initialPositions.erase(initialPosition);
984 4 : return;
985 : }
986 :
987 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
988 : {
989 5 : auto currentPosition = m_context.currentPosition.find(source);
990 5 : if (m_context.currentPosition.end() == currentPosition)
991 : {
992 4 : return;
993 : }
994 1 : auto initialPosition = m_context.initialPositions.find(source);
995 1 : if (m_context.initialPositions.end() == initialPosition)
996 : {
997 0 : return;
998 : }
999 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1000 1 : currentPosition->second == initialPosition->second.back())
1001 : {
1002 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1003 1 : SegmentData additionalSegment = initialPosition->second.back();
1004 1 : additionalSegment.resetTime = false;
1005 1 : initialPosition->second.push_back(additionalSegment);
1006 : }
1007 : }
1008 :
1009 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1010 : {
1011 2 : auto initialPosition = m_context.initialPositions.find(source);
1012 2 : if (m_context.initialPositions.end() == initialPosition)
1013 : {
1014 : // Sending initial sample not needed
1015 1 : return;
1016 : }
1017 :
1018 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1019 : GST_TIME_ARGS(initialPosition->second.back().position));
1020 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1021 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1022 :
1023 1 : m_context.initialPositions.erase(initialPosition);
1024 : }
1025 :
1026 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1027 : {
1028 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1029 : {
1030 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1031 1 : return false;
1032 : }
1033 6 : if (source->getMimeType().empty())
1034 : {
1035 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1036 1 : return false;
1037 : }
1038 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1039 5 : if (!audioAttributes)
1040 : {
1041 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1042 1 : return false;
1043 : }
1044 : std::int64_t currentDispPts64b; // In netflix code it's currentDisplayPosition + offset
1045 4 : m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, ¤tDispPts64b);
1046 4 : long long currentDispPts = currentDispPts64b; // NOLINT(runtime/int)
1047 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1048 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1049 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1050 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1051 : {
1052 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1053 3 : int sampleAttributes{
1054 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks that this param != NULL.
1055 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1056 3 : unsigned int ui32Delay{0}; // output param
1057 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1058 : // currentDispPts in rdk_gstreamer_utils function stub
1059 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1060 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1061 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1062 3 : m_glibWrapper->gFree(oldCapsCStr);
1063 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1064 3 : bool svpEnabled{true}; // assume always true
1065 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1066 : bool result =
1067 3 : m_rdkGstreamerUtilsWrapper
1068 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1069 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1070 : &audioChangeStage,
1071 : &caps, // may fail for amlogic - that implementation changes
1072 : // this parameter, it's probably used by Netflix later
1073 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1074 :
1075 3 : if (!result || !retVal)
1076 : {
1077 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1078 : }
1079 : }
1080 : else
1081 : {
1082 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1083 : }
1084 :
1085 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1086 4 : if (caps)
1087 4 : m_gstWrapper->gstCapsUnref(caps);
1088 4 : if (oldCaps)
1089 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1090 :
1091 4 : return true;
1092 5 : }
1093 :
1094 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1095 : {
1096 88 : if (m_workerThread)
1097 : {
1098 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1099 : }
1100 : }
1101 :
1102 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1103 : {
1104 1 : if (m_workerThread)
1105 : {
1106 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1107 : }
1108 : }
1109 :
1110 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1111 : {
1112 3 : if (m_workerThread)
1113 : {
1114 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1115 6 : m_workerThread->enqueueTask(
1116 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1117 : }
1118 3 : }
1119 :
1120 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1121 : {
1122 2 : if (m_workerThread)
1123 : {
1124 2 : bool underflowEnabled = m_context.isPlaying;
1125 4 : m_workerThread->enqueueTask(
1126 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1127 : }
1128 2 : }
1129 :
1130 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1131 : {
1132 1 : allSourcesAttached();
1133 : }
1134 :
1135 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1136 : {
1137 14 : auto elem = m_context.streamInfo.find(mediaSource);
1138 14 : if (elem != m_context.streamInfo.end())
1139 : {
1140 14 : StreamInfo &streamInfo = elem->second;
1141 14 : if (!streamInfo.underflowOccured)
1142 : {
1143 11 : return;
1144 : }
1145 :
1146 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1147 3 : streamInfo.underflowOccured = false;
1148 : }
1149 : }
1150 :
1151 1 : void GstGenericPlayer::play()
1152 : {
1153 1 : if (m_workerThread)
1154 : {
1155 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1156 : }
1157 : }
1158 :
1159 1 : void GstGenericPlayer::pause()
1160 : {
1161 1 : if (m_workerThread)
1162 : {
1163 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1164 : }
1165 : }
1166 :
1167 1 : void GstGenericPlayer::stop()
1168 : {
1169 1 : if (m_workerThread)
1170 : {
1171 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1172 : }
1173 : }
1174 :
1175 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1176 : {
1177 4 : if (!m_context.pipeline)
1178 : {
1179 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1180 1 : if (m_gstPlayerClient)
1181 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1182 1 : return false;
1183 : }
1184 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1185 : {
1186 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1187 1 : if (m_gstPlayerClient)
1188 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1189 1 : return false;
1190 : }
1191 2 : return true;
1192 : }
1193 :
1194 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1195 : {
1196 1 : if (m_workerThread)
1197 : {
1198 2 : m_workerThread->enqueueTask(
1199 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1200 : }
1201 1 : }
1202 :
1203 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1204 : {
1205 1 : if (m_workerThread)
1206 : {
1207 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1208 : }
1209 : }
1210 :
1211 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1212 : {
1213 4 : bool result = false;
1214 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1215 4 : if (videoSink)
1216 : {
1217 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1218 : {
1219 : std::string rect =
1220 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1221 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1222 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1223 2 : m_context.pendingGeometry.clear();
1224 2 : result = true;
1225 : }
1226 : else
1227 : {
1228 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1229 : }
1230 3 : m_gstWrapper->gstObjectUnref(videoSink);
1231 : }
1232 :
1233 4 : return result;
1234 : }
1235 :
1236 3 : bool GstGenericPlayer::setImmediateOutput()
1237 : {
1238 3 : bool result{false};
1239 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1240 : {
1241 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1242 3 : if (sink)
1243 : {
1244 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1245 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1246 :
1247 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1248 : {
1249 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1250 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1251 1 : result = true;
1252 : }
1253 : else
1254 : {
1255 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1256 : }
1257 2 : m_context.pendingImmediateOutputForVideo.reset();
1258 2 : m_gstWrapper->gstObjectUnref(sink);
1259 : }
1260 : else
1261 : {
1262 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1263 : }
1264 : }
1265 3 : return result;
1266 : }
1267 :
1268 4 : bool GstGenericPlayer::setShowVideoWindow()
1269 : {
1270 4 : if (!m_context.pendingShowVideoWindow.has_value())
1271 : {
1272 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1273 1 : return false;
1274 : }
1275 :
1276 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1277 3 : if (!videoSink)
1278 : {
1279 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1280 1 : return false;
1281 : }
1282 2 : bool result{false};
1283 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1284 : {
1285 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1286 1 : result = true;
1287 : }
1288 : else
1289 : {
1290 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1291 : }
1292 2 : m_context.pendingShowVideoWindow.reset();
1293 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1294 2 : return result;
1295 : }
1296 :
1297 4 : bool GstGenericPlayer::setLowLatency()
1298 : {
1299 4 : bool result{false};
1300 4 : if (m_context.pendingLowLatency.has_value())
1301 : {
1302 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1303 4 : if (sink)
1304 : {
1305 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1306 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1307 :
1308 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1309 : {
1310 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1311 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1312 2 : result = true;
1313 : }
1314 : else
1315 : {
1316 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1317 : }
1318 3 : m_context.pendingLowLatency.reset();
1319 3 : m_gstWrapper->gstObjectUnref(sink);
1320 : }
1321 : else
1322 : {
1323 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1324 : }
1325 : }
1326 4 : return result;
1327 : }
1328 :
1329 3 : bool GstGenericPlayer::setSync()
1330 : {
1331 3 : bool result{false};
1332 3 : if (m_context.pendingSync.has_value())
1333 : {
1334 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1335 3 : if (sink)
1336 : {
1337 2 : bool sync{m_context.pendingSync.value()};
1338 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1339 :
1340 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1341 : {
1342 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1343 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1344 1 : result = true;
1345 : }
1346 : else
1347 : {
1348 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1349 : }
1350 2 : m_context.pendingSync.reset();
1351 2 : m_gstWrapper->gstObjectUnref(sink);
1352 : }
1353 : else
1354 : {
1355 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1356 : }
1357 : }
1358 3 : return result;
1359 : }
1360 :
1361 3 : bool GstGenericPlayer::setSyncOff()
1362 : {
1363 3 : bool result{false};
1364 3 : if (m_context.pendingSyncOff.has_value())
1365 : {
1366 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1367 3 : if (decoder)
1368 : {
1369 2 : bool syncOff{m_context.pendingSyncOff.value()};
1370 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1371 :
1372 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1373 : {
1374 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1375 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1376 1 : result = true;
1377 : }
1378 : else
1379 : {
1380 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1381 : }
1382 2 : m_context.pendingSyncOff.reset();
1383 2 : m_gstWrapper->gstObjectUnref(decoder);
1384 : }
1385 : else
1386 : {
1387 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1388 : }
1389 : }
1390 3 : return result;
1391 : }
1392 :
1393 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1394 : {
1395 6 : bool result{false};
1396 6 : int32_t streamSyncMode{0};
1397 : {
1398 6 : std::unique_lock lock{m_context.propertyMutex};
1399 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1400 : {
1401 0 : return false;
1402 : }
1403 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1404 : }
1405 6 : if (MediaSourceType::AUDIO == type)
1406 : {
1407 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1408 3 : if (!decoder)
1409 : {
1410 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1411 1 : return false;
1412 : }
1413 :
1414 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1415 :
1416 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1417 : {
1418 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1419 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1420 1 : result = true;
1421 : }
1422 : else
1423 : {
1424 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1425 : }
1426 2 : m_gstWrapper->gstObjectUnref(decoder);
1427 2 : std::unique_lock lock{m_context.propertyMutex};
1428 2 : m_context.pendingStreamSyncMode.erase(type);
1429 : }
1430 3 : else if (MediaSourceType::VIDEO == type)
1431 : {
1432 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1433 3 : if (!parser)
1434 : {
1435 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1436 1 : return false;
1437 : }
1438 :
1439 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1440 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1441 :
1442 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1443 : {
1444 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1445 1 : result = true;
1446 : }
1447 : else
1448 : {
1449 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1450 : }
1451 2 : m_gstWrapper->gstObjectUnref(parser);
1452 2 : std::unique_lock lock{m_context.propertyMutex};
1453 2 : m_context.pendingStreamSyncMode.erase(type);
1454 : }
1455 4 : return result;
1456 : }
1457 :
1458 3 : bool GstGenericPlayer::setRenderFrame()
1459 : {
1460 3 : bool result{false};
1461 3 : if (m_context.pendingRenderFrame)
1462 : {
1463 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1464 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1465 3 : if (sink)
1466 : {
1467 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1468 : {
1469 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1470 :
1471 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1472 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1473 : false));
1474 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1475 1 : result = true;
1476 : }
1477 : else
1478 : {
1479 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1480 : }
1481 2 : m_gstWrapper->gstObjectUnref(sink);
1482 2 : m_context.pendingRenderFrame = false;
1483 : }
1484 : else
1485 : {
1486 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1487 : }
1488 : }
1489 3 : return result;
1490 : }
1491 :
1492 3 : bool GstGenericPlayer::setBufferingLimit()
1493 : {
1494 3 : bool result{false};
1495 3 : guint bufferingLimit{0};
1496 : {
1497 3 : std::unique_lock lock{m_context.propertyMutex};
1498 3 : if (!m_context.pendingBufferingLimit.has_value())
1499 : {
1500 0 : return false;
1501 : }
1502 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1503 : }
1504 :
1505 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1506 3 : if (decoder)
1507 : {
1508 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1509 :
1510 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1511 : {
1512 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1513 1 : result = true;
1514 : }
1515 : else
1516 : {
1517 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1518 : GST_ELEMENT_NAME(decoder));
1519 : }
1520 2 : m_gstWrapper->gstObjectUnref(decoder);
1521 2 : std::unique_lock lock{m_context.propertyMutex};
1522 2 : m_context.pendingBufferingLimit.reset();
1523 : }
1524 : else
1525 : {
1526 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1527 : }
1528 3 : return result;
1529 : }
1530 :
1531 2 : bool GstGenericPlayer::setUseBuffering()
1532 : {
1533 2 : std::unique_lock lock{m_context.propertyMutex};
1534 2 : if (m_context.pendingUseBuffering.has_value())
1535 : {
1536 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1537 : {
1538 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1539 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1540 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1541 : useBufferingGboolean, nullptr);
1542 1 : m_context.pendingUseBuffering.reset();
1543 1 : return true;
1544 : }
1545 : else
1546 : {
1547 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1548 : }
1549 : }
1550 1 : return false;
1551 2 : }
1552 :
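// If a westerossink factory is present, create a westerossink instance with "res-usage" set to 0x0 and
// install it as the pipeline's "video-sink", presumably so that this secondary video session does not
// reserve the primary decoder resources (the exact semantics of the res-usage bitmask are
// platform-specific). On platforms without westerossink the default sink is kept and the call
// succeeds trivially.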
1553 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1554 : {
1555 8 : bool result = false;
1556 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1557 8 : if (factory)
1558 : {
1559 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1560 7 : if (videoSink)
1561 : {
1562 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1563 : {
1564 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1565 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1566 4 : result = true;
1567 : }
1568 : else
1569 : {
1570 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1571 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1572 : }
1573 : }
1574 : else
1575 : {
1576 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1577 : }
1578 :
1579 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1580 : }
1581 : else
1582 : {
1583 : // No westeros sink
1584 1 : result = true;
1585 : }
1586 :
1587 8 : return result;
1588 : }
1589 :
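// Publish an "erm" GstContext carrying "res-usage" = 0x0 to the whole pipeline; like
// setWesterossinkSecondaryVideo() this presumably advertises that the session should not claim the
// primary decode resources. If the context or its writable structure cannot be created, an error is
// logged and false is returned.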
1590 8 : bool GstGenericPlayer::setErmContext()
1591 : {
1592 8 : bool result = false;
1593 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1594 8 : if (context)
1595 : {
1596 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1597 6 : if (contextStructure)
1598 : {
1599 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1600 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1601 5 : result = true;
1602 : }
1603 : else
1604 : {
1605 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1606 : }
1607 6 : m_gstWrapper->gstContextUnref(context);
1608 : }
1609 : else
1610 : {
1611 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1612 : }
1613 :
1614 8 : return result;
1615 : }
1616 :
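// Start a periodic timer (kPositionReportTimerMs) that enqueues a ReportPosition task and a
// CheckAudioUnderflow task on the worker thread. The timer is only created if it is not already
// running and is cancelled again by the stop method below.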
1617 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1618 : {
1619 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1620 : {
1621 1 : return;
1622 : }
1623 :
1624 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1625 : kPositionReportTimerMs,
1626 10 : [this]()
1627 : {
1628 1 : if (m_workerThread)
1629 : {
1630 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context));
1631 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1632 : }
1633 1 : },
1634 5 : firebolt::rialto::common::TimerType::PERIODIC);
1635 : }
1636 :
1637 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1638 : {
1639 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1640 : {
1641 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1642 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1643 : }
1644 4 : }
1645 :
1646 2 : void GstGenericPlayer::stopWorkerThread()
1647 : {
1648 2 : if (m_workerThread)
1649 : {
1650 2 : m_workerThread->stop();
1651 : }
1652 : }
1653 :
1654 0 : void GstGenericPlayer::setPendingPlaybackRate()
1655 : {
1656 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1657 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1658 : }
1659 :
1660 1 : void GstGenericPlayer::renderFrame()
1661 : {
1662 1 : if (m_workerThread)
1663 : {
1664 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1665 : }
1666 : }
1667 :
1668 16 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1669 : {
1670 16 : if (m_workerThread)
1671 : {
1672 32 : m_workerThread->enqueueTask(
1673 32 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1674 : }
1675 16 : }
1676 :
1677 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1678 : {
1679 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1680 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1681 3 : if (!m_context.pipeline)
1682 : {
1683 0 : return false;
1684 : }
1685 :
1686 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1687 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1688 :           // The code has been written to be backwards compatible on platforms that don't have this property.
1689 : // The observed behaviour was:
1690 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1691 : // to find volume in the pipeline works and is used.
1692 : // - if the returned fade volume is positive then audio-fade is active. In this case the returned fade volume
1693 : // directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
1694 : // meaningless and doesn't contribute in this case).
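    //       For example, with audio-fade active a reported fade-volume of 75 maps to a
    //       currentVolume of 0.75 in the branch below.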
1695 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1696 5 : if (m_context.audioFadeEnabled && sink &&
1697 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1698 : {
1699 2 : gint fadeVolume{-100};
1700 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1701 2 : if (fadeVolume < 0)
1702 : {
1703 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1704 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1705 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1706 : }
1707 : else
1708 : {
1709 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1710 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1711 : }
1712 : }
1713 : else
1714 : {
1715 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1716 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1717 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1718 : }
1719 :
1720 3 : if (sink)
1721 2 : m_gstWrapper->gstObjectUnref(sink);
1722 :
1723 3 : return true;
1724 : }
1725 :
1726 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1727 : {
1728 1 : if (m_workerThread)
1729 : {
1730 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1731 : }
1732 : }
1733 :
1734 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1735 : {
1736 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1737 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1738 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1739 : {
1740 2 : if (!m_context.subtitleSink)
1741 : {
1742 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1743 1 : return false;
1744 : }
1745 1 : gboolean muteValue{FALSE};
1746 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1747 1 : mute = muteValue;
1748 : }
1749 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1750 : {
1751 2 : if (!m_context.pipeline)
1752 : {
1753 1 : return false;
1754 : }
1755 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1756 : }
1757 : else
1758 : {
1759 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1760 1 : return false;
1761 : }
1762 :
1763 2 : return true;
1764 : }
1765 :
1766 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1767 : {
1768 1 : GstElement *sink = getSink(mediaSourceType);
1769 1 : if (!sink)
1770 : {
1771 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1772 0 : return true; // Our sinks are async by default
1773 : }
1774 1 : gboolean returnValue{TRUE};
1775 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1776 1 : m_gstWrapper->gstObjectUnref(sink);
1777 1 : return returnValue == TRUE;
1778 : }
1779 :
1780 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1781 : {
1782 1 : if (m_workerThread)
1783 : {
1784 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1785 : }
1786 : }
1787 :
1788 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1789 : {
1790 3 : if (!m_context.subtitleSink)
1791 : {
1792 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1793 1 : return false;
1794 : }
1795 :
1796 2 : gchar *identifier = nullptr;
1797 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1798 :
1799 2 : if (identifier)
1800 : {
1801 1 : textTrackIdentifier = identifier;
1802 1 : m_glibWrapper->gFree(identifier);
1803 1 : return true;
1804 : }
1805 : else
1806 : {
1807 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1808 1 : return false;
1809 : }
1810 : }
1811 :
1812 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1813 : {
1814 1 : if (m_workerThread)
1815 : {
1816 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1817 : }
1818 1 : return true;
1819 : }
1820 :
1821 1 : bool GstGenericPlayer::setSync(bool sync)
1822 : {
1823 1 : if (m_workerThread)
1824 : {
1825 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1826 : }
1827 1 : return true;
1828 : }
1829 :
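// getSync() tries three things in order: read the "sync" property from the attached audio sink, fall
// back to a value queued in m_context.pendingSync, and otherwise fail because the sink's default
// behaviour is unknown.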
1830 4 : bool GstGenericPlayer::getSync(bool &sync)
1831 : {
1832 4 : bool returnValue{false};
1833 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1834 4 : if (sink)
1835 : {
1836 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1837 : {
1838 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1839 1 : returnValue = true;
1840 : }
1841 : else
1842 : {
1843 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1844 : }
1845 2 : m_gstWrapper->gstObjectUnref(sink);
1846 : }
1847 2 : else if (m_context.pendingSync.has_value())
1848 : {
1849 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1850 1 : sync = m_context.pendingSync.value();
1851 1 : returnValue = true;
1852 : }
1853 : else
1854 : {
1855 :               // We don't know the default value of the sync property, so return failure here
1856 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1857 : }
1858 :
1859 4 : return returnValue;
1860 : }
1861 :
1862 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1863 : {
1864 1 : if (m_workerThread)
1865 : {
1866 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1867 : }
1868 1 : return true;
1869 : }
1870 :
1871 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1872 : {
1873 1 : if (m_workerThread)
1874 : {
1875 2 : m_workerThread->enqueueTask(
1876 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1877 : }
1878 1 : return true;
1879 : }
1880 :
1881 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1882 : {
1883 5 : bool returnValue{false};
1884 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1885 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1886 : {
1887 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1888 2 : returnValue = true;
1889 : }
1890 : else
1891 : {
1892 3 : std::unique_lock lock{m_context.propertyMutex};
1893 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1894 : {
1895 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1896 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1897 1 : returnValue = true;
1898 : }
1899 : else
1900 : {
1901 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1902 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1903 : }
1904 3 : }
1905 :
1906 5 : if (decoder)
1907 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1908 :
1909 5 : return returnValue;
1910 : }
1911 :
1912 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1913 : {
1914 1 : if (m_workerThread)
1915 : {
1916 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1917 : }
1918 : }
1919 :
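// flush() reports via the out-parameter whether the flush completes asynchronously (derived from the
// sink's "async" property), registers the flush with the flush watcher and then queues the actual
// Flush task on the worker thread.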
1920 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
1921 : {
1922 1 : if (m_workerThread)
1923 : {
1924 1 : async = isAsync(mediaSourceType);
1925 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
1926 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
1927 : }
1928 : }
1929 :
1930 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
1931 : double appliedRate, uint64_t stopPosition)
1932 : {
1933 1 : if (m_workerThread)
1934 : {
1935 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
1936 : resetTime, appliedRate, stopPosition));
1937 : }
1938 : }
1939 :
1940 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
1941 : {
1942 1 : if (m_workerThread)
1943 : {
1944 2 : m_workerThread->enqueueTask(
1945 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
1946 : }
1947 1 : }
1948 :
1949 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
1950 : {
1951 1 : if (m_workerThread)
1952 : {
1953 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
1954 : }
1955 : }
1956 :
1957 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
1958 : {
1959 5 : bool returnValue{false};
1960 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1961 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1962 : {
1963 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
1964 2 : returnValue = true;
1965 : }
1966 : else
1967 : {
1968 3 : std::unique_lock lock{m_context.propertyMutex};
1969 3 : if (m_context.pendingBufferingLimit.has_value())
1970 : {
1971 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1972 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
1973 1 : returnValue = true;
1974 : }
1975 : else
1976 : {
1977 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
1978 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1979 : }
1980 3 : }
1981 :
1982 5 : if (decoder)
1983 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1984 :
1985 5 : return returnValue;
1986 : }
1987 :
1988 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
1989 : {
1990 1 : if (m_workerThread)
1991 : {
1992 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
1993 : }
1994 : }
1995 :
1996 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
1997 : {
1998 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1999 : {
2000 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2001 1 : return true;
2002 : }
2003 : else
2004 : {
2005 2 : std::unique_lock lock{m_context.propertyMutex};
2006 2 : if (m_context.pendingUseBuffering.has_value())
2007 : {
2008 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2009 1 : useBuffering = m_context.pendingUseBuffering.value();
2010 1 : return true;
2011 : }
2012 2 : }
2013 1 : return false;
2014 : }
2015 :
2016 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2017 : {
2018 1 : if (m_workerThread)
2019 : {
2020 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2021 : }
2022 : }
2023 :
2024 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2025 : {
2026 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2027 : }
2028 :
2029 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2030 : {
2031 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2032 : }
2033 :
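// autovideosink/autoaudiosink are bins that instantiate a platform-specific sink as a child element.
// The helpers below cache (and clear) that child sink so that sink properties can later be read from
// and written to the real sink via getSinkChildIfAutoVideoSink()/getSinkChildIfAutoAudioSink().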
2034 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2035 : {
2036 : // Only add children that are sinks
2037 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2038 : {
2039 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2040 :
2041 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2042 : {
2043 1 :             RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2044 : }
2045 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2046 : }
2047 3 : }
2048 :
2049 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2050 : {
2051 : // Only add children that are sinks
2052 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2053 : {
2054 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2055 :
2056 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2057 : {
2058 1 :             RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2059 : }
2060 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2061 : }
2062 3 : }
2063 :
2064 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2065 : {
2066 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2067 : {
2068 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2069 :
2070 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2071 : {
2072 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2073 1 : return;
2074 : }
2075 :
2076 2 : m_context.autoVideoChildSink = nullptr;
2077 : }
2078 : }
2079 :
2080 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2081 : {
2082 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2083 : {
2084 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2085 :
2086 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2087 : {
2088 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2089 1 : return;
2090 : }
2091 :
2092 2 : m_context.autoAudioChildSink = nullptr;
2093 : }
2094 : }
2095 :
2096 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2097 : {
2098 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2099 14 : if (!kTmpName)
2100 0 : return sink;
2101 :
2102 28 : const std::string kElementTypeName{kTmpName};
2103 14 : if (kElementTypeName == "GstAutoVideoSink")
2104 : {
2105 1 : if (!m_context.autoVideoChildSink)
2106 : {
2107 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2108 : }
2109 : else
2110 : {
2111 1 : return m_context.autoVideoChildSink;
2112 : }
2113 : }
2114 13 : return sink;
2115 14 : }
2116 :
2117 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2118 : {
2119 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2120 11 : if (!kTmpName)
2121 0 : return sink;
2122 :
2123 22 : const std::string kElementTypeName{kTmpName};
2124 11 : if (kElementTypeName == "GstAutoAudioSink")
2125 : {
2126 1 : if (!m_context.autoAudioChildSink)
2127 : {
2128 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2129 : }
2130 : else
2131 : {
2132 1 : return m_context.autoAudioChildSink;
2133 : }
2134 : }
2135 10 : return sink;
2136 11 : }
2137 :
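// Build the playbin "flags" bitmask: video, native-video and text are always enabled, audio is added
// on request, and native-audio is enabled only when a brcmaudiosink factory exists on the platform
// (see shouldEnableNativeAudio() below).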
2138 206 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2139 : {
2140 206 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2141 :
2142 206 : if (enableAudio)
2143 : {
2144 206 : flags |= getGstPlayFlag("audio");
2145 206 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2146 : }
2147 :
2148 206 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2149 : }
2150 :
2151 206 : bool GstGenericPlayer::shouldEnableNativeAudio()
2152 : {
2153 206 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2154 206 : if (factory)
2155 : {
2156 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2157 1 : return true;
2158 : }
2159 205 : return false;
2160 : }
2161 :
2162 : } // namespace firebolt::rialto::server
|