Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <cstring>
23 : #include <ctime>
24 : #include <stdexcept>
25 :
26 : #include "FlushWatcher.h"
27 : #include "GstDispatcherThread.h"
28 : #include "GstGenericPlayer.h"
29 : #include "GstProtectionMetadata.h"
30 : #include "IGstTextTrackSinkFactory.h"
31 : #include "IMediaPipeline.h"
32 : #include "ITimer.h"
33 : #include "RialtoServerLogging.h"
34 : #include "TypeConverters.h"
35 : #include "Utils.h"
36 : #include "WorkerThread.h"
37 : #include "tasks/generic/GenericPlayerTaskFactory.h"
38 :
39 : namespace
40 : {
41 : /**
42 : * @brief Report position interval in ms.
43 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
44 : * whenever the session moves to another playback state.
45 : */
46 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
47 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
48 :
49 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
50 : {
51 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
52 2 : (lhs.stopPosition == rhs.stopPosition);
53 : }
54 : } // namespace
55 :
namespace firebolt::rialto::server
{
// Weak handle to the process-wide factory instance; getFactory() re-creates the
// factory when all previous shared_ptr owners have released it.
std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
59 :
60 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
61 : {
62 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
63 :
64 3 : if (!factory)
65 : {
66 : try
67 : {
68 3 : factory = std::make_shared<GstGenericPlayerFactory>();
69 : }
70 0 : catch (const std::exception &e)
71 : {
72 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
73 : }
74 :
75 3 : GstGenericPlayerFactory::m_factory = factory;
76 : }
77 :
78 3 : return factory;
79 : }
80 :
/**
 * @brief Creates a GstGenericPlayer, resolving all wrapper dependencies first.
 *        Any missing wrapper aborts creation; returns nullptr on failure.
 */
std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
{
    std::unique_ptr<IGstGenericPlayer> gstPlayer;

    try
    {
        // Resolve every wrapper singleton up front; failure of any one throws.
        auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
        auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
        std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
        if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
        {
            throw std::runtime_error("Cannot create GstWrapper");
        }
        if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
        {
            throw std::runtime_error("Cannot create GlibWrapper");
        }
        if ((!rdkGstreamerUtilsWrapperFactory) ||
            (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
        {
            throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
        }
        // Construct the player with all collaborators injected (the constructor
        // itself may also throw; that is caught and logged below).
        gstPlayer = std::make_unique<
            GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
                              rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
                              IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
                              std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
                                                                         rdkGstreamerUtilsWrapper,
                                                                         IGstTextTrackSinkFactory::createFactory()),
                              std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
                              IGstProtectionMetadataHelperFactory::createFactory());
    }
    catch (const std::exception &e)
    {
        RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
    }

    return gstPlayer;
}
125 :
// Constructor: wires up all collaborators and builds the pipeline. Throws on any
// missing dependency; later failure paths explicitly undo work already done
// (worker thread, pipeline) before throwing, since the destructor will not run.
GstGenericPlayer::GstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements,
    const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
    const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
    const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
    std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
    std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
    std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
    : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
      m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
      m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");

    // Block until global GStreamer initialisation has completed.
    gstInitialiser.waitForInitialisation();

    m_context.decryptionService = &decryptionService;

    if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
    {
        throw std::runtime_error("Cannot create GstSrc");
    }

    if (!timerFactory)
    {
        throw std::runtime_error("TimeFactory is invalid");
    }

    if ((!gstProtectionMetadataFactory) ||
        (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
    {
        throw std::runtime_error("Cannot create protection metadata wrapper");
    }

    // Ensure that rialtosrc has been initalised
    m_context.gstSrc->initSrc();

    // Start task thread
    if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
    {
        throw std::runtime_error("Failed to create the worker thread");
    }

    // Initialise pipeline
    switch (type)
    {
    case MediaType::MSE:
    {
        initMsePipeline();
        break;
    }
    default:
    {
        // The worker thread is already running; shut it down before aborting construction.
        resetWorkerThread();
        throw std::runtime_error("Media type not supported");
    }
    }

    // Check the video requirements for a limited video.
    // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
    // video in a dual video scenario.
    if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
    {
        RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
        bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
        bool ermContextResult = setErmContext();
        // Only fail when BOTH mechanisms are unavailable; either one suffices.
        if (!westerossinkSecondaryVideoResult && !ermContextResult)
        {
            // Undo the worker thread and pipeline before aborting construction.
            resetWorkerThread();
            termPipeline();
            throw std::runtime_error("Could not set secondary video");
        }
    }
    else
    {
        RIALTO_SERVER_LOG_MIL("Primary video playback selected");
    }

    m_gstDispatcherThread = gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline,
                                                                                  m_context.flushOnPrerollController,
                                                                                  m_gstWrapper);
}
211 :
// Destructor: teardown order matters — stop dispatching bus callbacks first,
// then drain/stop the worker thread, then destroy the pipeline.
GstGenericPlayer::~GstGenericPlayer()
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
    m_gstDispatcherThread.reset();

    resetWorkerThread();

    termPipeline();
}
221 :
// Builds the MSE pipeline: a playbin configured to pull data from rialtosrc
// (uri "rialto://"), with callbacks hooked up for source/element setup.
void GstGenericPlayer::initMsePipeline()
{
    // Make playbin
    m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
    // Set pipeline flags
    setPlaybinFlags(true);

    // Set callbacks
    m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
                                  G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);

    // Set uri
    m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);

    // Check playsink
    GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
    if (playsink)
    {
        m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
        m_gstWrapper->gstObjectUnref(playsink);
    }
    else
    {
        GST_WARNING("No playsink ?!?!?");
    }
    if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
    {
        GST_WARNING("Failed to set pipeline to READY state");
    }
    RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
}
255 :
// Shuts the worker thread down cleanly: enqueue the shutdown task, wait for
// the thread to exit, then release it.
void GstGenericPlayer::resetWorkerThread()
{
    // Shutdown task thread
    m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
    m_workerThread->join();
    m_workerThread.reset();
}
263 :
264 220 : void GstGenericPlayer::termPipeline()
265 : {
266 220 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
267 : {
268 0 : m_finishSourceSetupTimer->cancel();
269 : }
270 :
271 220 : m_finishSourceSetupTimer.reset();
272 :
273 271 : for (auto &elem : m_context.streamInfo)
274 : {
275 51 : StreamInfo &streamInfo = elem.second;
276 53 : for (auto &buffer : streamInfo.buffers)
277 : {
278 2 : m_gstWrapper->gstBufferUnref(buffer);
279 : }
280 :
281 51 : streamInfo.buffers.clear();
282 : }
283 :
284 220 : m_taskFactory->createStop(m_context, *this)->execute();
285 220 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
286 220 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
287 220 : m_gstWrapper->gstObjectUnref(bus);
288 :
289 220 : if (m_context.source)
290 : {
291 1 : m_gstWrapper->gstObjectUnref(m_context.source);
292 : }
293 220 : if (m_context.subtitleSink)
294 : {
295 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
296 4 : m_context.subtitleSink = nullptr;
297 : }
298 :
299 220 : if (m_context.videoSink)
300 : {
301 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
302 0 : m_context.videoSink = nullptr;
303 : }
304 220 : if (m_context.playbackGroup.m_curAudioPlaysinkBin)
305 : {
306 1 : m_gstWrapper->gstObjectUnref(m_context.playbackGroup.m_curAudioPlaysinkBin);
307 1 : m_context.playbackGroup.m_curAudioPlaysinkBin = nullptr;
308 : }
309 :
310 : // Delete the pipeline
311 220 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
312 :
313 220 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
314 : }
315 :
316 881 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
317 : {
318 : GFlagsClass *flagsClass =
319 881 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
320 881 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
321 881 : return flag ? flag->value : 0;
322 : }
323 :
324 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
325 : {
326 1 : self->m_gstWrapper->gstObjectRef(source);
327 1 : if (self->m_workerThread)
328 : {
329 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
330 : }
331 : }
332 :
333 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
334 : {
335 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
336 1 : self->m_gstWrapper->gstObjectRef(element);
337 1 : if (self->m_workerThread)
338 : {
339 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
340 : }
341 : }
342 :
343 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
344 : {
345 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
346 1 : if (self->m_workerThread)
347 : {
348 2 : self->m_workerThread->enqueueTask(
349 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
350 : }
351 1 : }
352 :
353 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
354 : {
355 1 : if (m_workerThread)
356 : {
357 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
358 : }
359 : }
360 :
361 2 : void GstGenericPlayer::allSourcesAttached()
362 : {
363 2 : if (m_workerThread)
364 : {
365 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
366 : }
367 : }
368 :
369 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
370 : {
371 1 : if (m_workerThread)
372 : {
373 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
374 : }
375 : }
376 :
377 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
378 : {
379 1 : if (m_workerThread)
380 : {
381 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
382 : }
383 : }
384 :
385 1 : void GstGenericPlayer::setPosition(std::int64_t position)
386 : {
387 1 : if (m_workerThread)
388 : {
389 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
390 : }
391 : }
392 :
393 1 : void GstGenericPlayer::setPlaybackRate(double rate)
394 : {
395 1 : if (m_workerThread)
396 : {
397 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
398 : }
399 : }
400 :
401 11 : bool GstGenericPlayer::getPosition(std::int64_t &position)
402 : {
403 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
404 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
405 11 : position = getPosition(m_context.pipeline);
406 11 : if (position == -1)
407 : {
408 3 : return false;
409 : }
410 :
411 8 : return true;
412 : }
413 :
414 50 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
415 : {
416 50 : const char *kSinkName{nullptr};
417 50 : GstElement *sink{nullptr};
418 50 : switch (mediaSourceType)
419 : {
420 29 : case MediaSourceType::AUDIO:
421 29 : kSinkName = "audio-sink";
422 29 : break;
423 18 : case MediaSourceType::VIDEO:
424 18 : kSinkName = "video-sink";
425 18 : break;
426 1 : case MediaSourceType::SUBTITLE:
427 1 : kSinkName = "text-sink";
428 1 : break;
429 2 : default:
430 2 : break;
431 : }
432 50 : if (!kSinkName)
433 : {
434 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
435 : }
436 : else
437 : {
438 48 : if (m_context.pipeline == nullptr)
439 : {
440 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
441 : }
442 : else
443 : {
444 48 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
445 : }
446 48 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
447 48 : if (sink && firebolt::rialto::MediaSourceType::SUBTITLE != mediaSourceType)
448 : {
449 30 : GstElement *autoSink{sink};
450 30 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
451 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
452 16 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
453 16 : autoSink = getSinkChildIfAutoAudioSink(sink);
454 :
455 : // Is this an auto-sink?...
456 30 : if (autoSink != sink)
457 : {
458 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
459 :
460 : // increase the reference count of the auto sink
461 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
462 : }
463 : }
464 : }
465 50 : return sink;
466 : }
467 :
// Records that the flush for the given stream type has completed.
void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
{
    m_flushWatcher->setFlushed(mediaSourceType);
}
472 :
473 6 : void GstGenericPlayer::notifyPlaybackInfo()
474 : {
475 6 : PlaybackInfo info;
476 6 : getPosition(info.currentPosition);
477 6 : getVolume(info.volume);
478 6 : m_gstPlayerClient->notifyPlaybackInfo(info);
479 : }
480 :
// Walks every element in the pipeline and returns the first decoder whose
// factory matches the requested media type; the returned element carries a new
// reference owned by the caller. Returns nullptr if no decoder is found.
// NOTE(review): near-duplicate of getParser() below (only the factory list
// type differs) — consider extracting a shared helper.
GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                // Build the factory classification to match: decoder + media type.
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found: release iterator resources, return a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed while iterating; restart the walk.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find decoder");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
536 :
// Walks every element in the pipeline and returns the first parser whose
// factory matches the requested media type; the returned element carries a new
// reference owned by the caller. Returns nullptr if no parser is found.
// NOTE(review): near-duplicate of getDecoder() above — consider a shared helper.
GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                // Build the factory classification to match: parser + media type.
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found: release iterator resources, return a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed while iterating; restart the walk.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find parser");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
592 :
593 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
594 7 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
595 : {
596 7 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
597 7 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
598 7 : if (kSource)
599 : {
600 6 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
601 : audioAttributes =
602 18 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
603 6 : audioConfig.numberOfChannels, audioConfig.sampleRate,
604 : 0, // used only in one of logs in rdk_gstreamer_utils, no
605 : // need to set this param.
606 : 0, // used only in one of logs in rdk_gstreamer_utils, no
607 : // need to set this param.
608 6 : audioConfig.codecSpecificConfig.data(),
609 : static_cast<std::uint32_t>(
610 6 : audioConfig.codecSpecificConfig.size())};
611 6 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
612 : {
613 4 : audioAttributes->m_codecParam = "mp4a";
614 : }
615 2 : else if (source->getMimeType() == "audio/x-eac3")
616 : {
617 1 : audioAttributes->m_codecParam = "ec-3";
618 : }
619 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
620 : {
621 1 : audioAttributes->m_codecParam = "lpcm";
622 : }
623 6 : }
624 : else
625 : {
626 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
627 : }
628 :
629 7 : return audioAttributes;
630 : }
631 :
632 2 : void GstGenericPlayer::configAudioCap(firebolt::rialto::wrappers::AudioAttributesPrivate *pAttrib, bool *audioaac,
633 : bool svpenabled, GstCaps **appsrcCaps)
634 : {
635 : // this function comes from rdk_gstreamer_utils
636 2 : if (!pAttrib || !audioaac || !appsrcCaps)
637 : {
638 0 : RIALTO_SERVER_LOG_ERROR("configAudioCap: invalid null parameter");
639 0 : return;
640 : }
641 : gchar *capsString;
642 2 : RIALTO_SERVER_LOG_DEBUG("Config audio codec %s sampling rate %d channel %d alignment %d",
643 : pAttrib->m_codecParam.c_str(), pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
644 : pAttrib->m_blockAlignment);
645 6 : if (pAttrib->m_codecParam.compare(0, 4, std::string("mp4a")) == 0)
646 : {
647 2 : RIALTO_SERVER_LOG_DEBUG("Using AAC");
648 2 : capsString = m_glibWrapper->gStrdupPrintf("audio/mpeg, mpegversion=4, enable-svp=(string)%s",
649 : svpenabled ? "true" : "false");
650 2 : *audioaac = true;
651 : }
652 : else
653 : {
654 0 : RIALTO_SERVER_LOG_DEBUG("Using EAC3");
655 0 : capsString = m_glibWrapper->gStrdupPrintf("audio/x-eac3, framed=(boolean)true, rate=(int)%u, channels=(int)%u, "
656 : "alignment=(string)frame, enable-svp=(string)%s",
657 : pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
658 : svpenabled ? "true" : "false");
659 0 : *audioaac = false;
660 : }
661 2 : *appsrcCaps = m_gstWrapper->gstCapsFromString(capsString);
662 2 : m_glibWrapper->gFree(capsString);
663 : }
664 :
// Halts audio during an on-the-fly codec switch: takes the audio playsink bin
// to READY and the decode bin to PAUSED, waiting for each transition.
void GstGenericPlayer::haltAudioPlayback()
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("haltAudioPlayback: audio playsink bin or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};

    // Transition Playsink to Ready
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioPlaysinkBin, GST_STATE_READY))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioPlaysinkBin to READY");
        return;
    }
    // Block until the state change settles (GST_CLOCK_TIME_NONE = wait forever).
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_PAUSED)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioPlaySinkBin State = %d", currentState);
    // Transition Decodebin to Paused
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecodeBin, GST_STATE_PAUSED))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioDecodeBin to PAUSED");
        return;
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_PAUSED)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current DecodeBin State = %d", currentState);
}
698 :
// Resumes audio after an on-the-fly codec switch: re-syncs the playsink and
// decode bins with their parent's state, logging the resulting states.
void GstGenericPlayer::resumeAudioPlayback()
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("resumeAudioPlayback: audio playsink bin or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioPlaysinkBin);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> AudioPlaysinkbin State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioDecodeBin);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> Decodebin State = %d Pending = %d", currentState, pending);
}
717 :
// First AC3->AAC on-the-fly switch: unlinks typefind from its downstream sink,
// inserts aacparse -> queue -> avdec_aac into the decode bin, forces the new
// caps on typefind, then syncs all new elements with the parent state.
// Pad-link ordering here follows rdk_gstreamer_utils and must not be reordered.
void GstGenericPlayer::firstTimeSwitchFromAC3toAAC(GstCaps *newAudioCaps)
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioTypefind || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("firstTimeSwitchFromAC3toAAC: audio typefind or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
    GstPad *pTypfdSrcPad = NULL;
    GstPad *pTypfdSrcPeerPad = NULL;
    GstPad *pNewAudioDecoderSrcPad = NULL;
    GstElement *newAudioParse = NULL;
    GstElement *newAudioDecoder = NULL;
    GstElement *newQueue = NULL;
    gboolean linkRet = false;

    /* Get the SinkPad of ASink - pTypfdSrcPeerPad */
    if ((pTypfdSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioTypefind, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind SrcPad = %p", pTypfdSrcPad);
    if ((pTypfdSrcPeerPad = m_gstWrapper->gstPadGetPeer(pTypfdSrcPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind Src Downstream Element Pad = %p", pTypfdSrcPeerPad);
    // AudioDecoder Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(pTypfdSrcPad, pTypfdSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Downstream Unlink Failed");
    newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
    newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
    newQueue = m_gstWrapper->gstElementFactoryMake("queue", "aqueue");
    // Add new Decoder to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
    }
    // Add new Parser to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
    }
    // Add new Queue to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newQueue) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New queue = %p", newQueue);
    }
    if ((pNewAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", pNewAudioDecoderSrcPad);
    // Connect decoder to ASINK
    if (m_gstWrapper->gstPadLink(pNewAudioDecoderSrcPad, pTypfdSrcPeerPad) != GST_PAD_LINK_OK)
        RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
    linkRet = m_gstWrapper->gstElementLink(newAudioParse, newQueue) &&
              m_gstWrapper->gstElementLink(newQueue, newAudioDecoder);
    if (!linkRet)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Downstream Link Failed for typefind, parser, decoder");
    /* Force Caps */
    RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioTypefind, GST_STATE_READY))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY");
        // Release the pad references taken above before bailing out.
        m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
        m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
        m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
        return;
    }
    m_glibWrapper->gObjectSet(G_OBJECT(m_context.playbackGroup.m_curAudioTypefind), "force-caps", newAudioCaps, NULL);
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioTypefind);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioTypefind, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
    RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
    m_context.playbackGroup.m_linkTypefindParser = true;
    /* Update the state */
    m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
    m_gstWrapper->gstElementGetState(newAudioDecoder, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newQueue);
    m_gstWrapper->gstElementGetState(newQueue, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New queue State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
    m_gstWrapper->gstElementGetState(newAudioParse, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
    // Release the pad references taken at the top of this function.
    m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
    m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
    m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
    return;
}
804 :
/**
 * @brief Swaps the audio parser/decoder pair inside the decodebin when the incoming
 *        codec (AAC vs non-AAC) differs from the one currently configured.
 *
 * Ported from rdk_gstreamer_utils ("OTF" = on-the-fly codec switch). The sequence is
 * order-sensitive: pads are captured, both elements are unlinked, set to NULL and
 * removed from the bin, then the inverse parser/decoder pair is created, added,
 * relinked into the same pad positions and synced with the parent state.
 *
 * @param isAudioAAC   true when the incoming stream is AAC ("mp4a").
 * @param newAudioCaps caps forced onto the typefind element upstream of the parser.
 * @return true when a switch was performed, false when no switch was needed or
 *         required pipeline elements are missing.
 */
bool GstGenericPlayer::switchAudioCodec(bool isAudioAAC, GstCaps *newAudioCaps)
{ // this function comes from rdk_gstreamer_utils
    bool ret = false;
    RIALTO_SERVER_LOG_DEBUG("Current Audio Codec AAC = %d Same as Incoming audio Codec AAC = %d",
                            m_context.playbackGroup.m_isAudioAAC, isAudioAAC);
    // No-op when the codec did not actually change.
    if (m_context.playbackGroup.m_isAudioAAC == isAudioAAC)
    {
        return ret;
    }
    // First ever AC3 -> AAC transition: no decoder exists yet, take the dedicated path.
    if ((m_context.playbackGroup.m_curAudioDecoder == NULL) && (!(m_context.playbackGroup.m_isAudioAAC)) && (isAudioAAC))
    {
        firstTimeSwitchFromAC3toAAC(newAudioCaps);
        m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
        return true;
    }
    if (!m_context.playbackGroup.m_curAudioDecoder || !m_context.playbackGroup.m_curAudioParse ||
        !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("switchAudioCodec: audio decoder, parser or decode bin is null");
        return false;
    }
    GstElement *newAudioParse = NULL;
    GstElement *newAudioDecoder = NULL;
    GstPad *newAudioParseSrcPad = NULL;
    GstPad *newAudioParseSinkPad = NULL;
    GstPad *newAudioDecoderSrcPad = NULL;
    GstPad *newAudioDecoderSinkPad = NULL;
    GstPad *audioDecSrcPad = NULL;
    GstPad *audioDecSinkPad = NULL;
    GstPad *audioDecSrcPeerPad = NULL;
    GstPad *audioDecSinkPeerPad = NULL;
    GstPad *audioParseSrcPad = NULL;
    GstPad *audioParseSinkPad = NULL;
    GstPad *audioParseSrcPeerPad = NULL;
    GstPad *audioParseSinkPeerPad = NULL;
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};

    // ---- Capture all pads of the current parser/decoder and their peers, so the
    // ---- replacement elements can be linked into exactly the same positions.
    // Get AudioDecoder Src Pads
    if ((audioDecSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Pad = %p", audioDecSrcPad);
    // Get AudioDecoder Sink Pads
    if ((audioDecSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "sink")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Pad = %p", audioDecSinkPad);
    // Get AudioDecoder Src Peer i.e. Downstream Element Pad
    if ((audioDecSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSrcPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Downstream Element Pad = %p", audioDecSrcPeerPad);
    // Get AudioDecoder Sink Peer i.e. Upstream Element Pad
    if ((audioDecSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSinkPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Upstream Element Pad = %p", audioDecSinkPeerPad);
    // Get AudioParser Src Pads
    if ((audioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Pad = %p", audioParseSrcPad);
    // Get AudioParser Sink Pads
    if ((audioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "sink")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Pad = %p", audioParseSinkPad);
    // Get AudioParser Src Peer i.e. Downstream Element Pad
    if ((audioParseSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSrcPad)) != NULL) // Unref the Peer Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Downstream Element Pad = %p", audioParseSrcPeerPad);
    // Get AudioParser Sink Peer i.e. Upstream Element Pad
    if ((audioParseSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSinkPad)) != NULL) // Unref the Peer Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Upstream Element Pad = %p", audioParseSinkPeerPad);
    // ---- Detach current elements: unlink both sides of decoder and parser.
    // AudioDecoder Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioDecSrcPad, audioDecSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Downstream Unlink Failed");
    // AudioDecoder Upstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioDecSinkPeerPad, audioDecSinkPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Upstream Unlink Failed");
    // AudioParser Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioParseSrcPad, audioParseSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Downstream Unlink Failed");
    // AudioParser Upstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioParseSinkPeerPad, audioParseSinkPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Upstream Unlink Failed");
    // ---- Shut down the old elements (NULL state) before removing them from the bin.
    // Current Audio Decoder NULL
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecoder, GST_STATE_NULL))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioDecoder to NULL");
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecoder, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_NULL)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder State = %d", currentState);
    // Current Audio Parser NULL
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioParse, GST_STATE_NULL))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioParser to NULL");
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioParse, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_NULL)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser State = %d", currentState);
    // Remove Audio Decoder From Decodebin
    if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
                                   m_context.playbackGroup.m_curAudioDecoder) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioDecoder = %p", m_context.playbackGroup.m_curAudioDecoder);
        m_context.playbackGroup.m_curAudioDecoder = NULL;
    }
    // Remove Audio Parser From Decodebin
    if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
                                   m_context.playbackGroup.m_curAudioParse) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioParser = %p", m_context.playbackGroup.m_curAudioParse);
        m_context.playbackGroup.m_curAudioParse = NULL;
    }
    // Create new Audio Decoder and Parser. The inverse of the current
    // NOTE(review): when leaving AAC the decoder is an "identity" placeholder, which
    // presumably relies on downstream handling raw AC3 — confirm against target platform.
    if (m_context.playbackGroup.m_isAudioAAC)
    {
        newAudioParse = m_gstWrapper->gstElementFactoryMake("ac3parse", "ac3parse");
        newAudioDecoder = m_gstWrapper->gstElementFactoryMake("identity", "fake_aud_ac3dec");
    }
    else
    {
        newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
        newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
    }
    {
        GstPadLinkReturn gstPadLinkRet = GST_PAD_LINK_OK;
        GstElement *audioParseUpstreamEl = NULL;
        // Add new Decoder to Decodebin
        if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
        }
        // Add new Parser to Decodebin
        if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
        }
        if ((newAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) !=
            NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", newAudioDecoderSrcPad);
        if ((newAudioDecoderSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "sink")) !=
            NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Sink Pad = %p", newAudioDecoderSinkPad);
        // Link New Decoder to Downstream followed by UpStream
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioDecoderSrcPad, audioDecSrcPeerPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(audioDecSinkPeerPad, newAudioDecoderSinkPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Upstream Link Failed");
        if ((newAudioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "src")) != NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Src Pad = %p", newAudioParseSrcPad);
        if ((newAudioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "sink")) != NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Sink Pad = %p", newAudioParseSinkPad);
        // Link New Parser to Downstream followed by UpStream
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioParseSrcPad, audioParseSrcPeerPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Downstream Link Failed %d", gstPadLinkRet);
        // If the parser's upstream element is the tracked typefind, force the new caps on
        // it and re-sync it so the parser gets linked by the pad-added handling later.
        if ((audioParseUpstreamEl = GST_ELEMENT_CAST(m_gstWrapper->gstPadGetParent(audioParseSinkPeerPad))) ==
            m_context.playbackGroup.m_curAudioTypefind)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
            if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(audioParseUpstreamEl, GST_STATE_READY))
            {
                RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY in switchAudioCodec");
            }
            m_glibWrapper->gObjectSet(G_OBJECT(audioParseUpstreamEl), "force-caps", newAudioCaps, NULL);
            m_gstWrapper->gstElementSyncStateWithParent(audioParseUpstreamEl);
            m_gstWrapper->gstElementGetState(audioParseUpstreamEl, &currentState, &pending, GST_CLOCK_TIME_NONE);
            RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
            RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
            m_context.playbackGroup.m_linkTypefindParser = true;
            m_gstWrapper->gstObjectUnref(audioParseUpstreamEl);
        }
        m_gstWrapper->gstObjectUnref(newAudioDecoderSrcPad);
        m_gstWrapper->gstObjectUnref(newAudioDecoderSinkPad);
        m_gstWrapper->gstObjectUnref(newAudioParseSrcPad);
        m_gstWrapper->gstObjectUnref(newAudioParseSinkPad);
    }
    // Release all pad references captured above.
    m_gstWrapper->gstObjectUnref(audioParseSinkPeerPad);
    m_gstWrapper->gstObjectUnref(audioParseSrcPeerPad);
    m_gstWrapper->gstObjectUnref(audioParseSinkPad);
    m_gstWrapper->gstObjectUnref(audioParseSrcPad);
    m_gstWrapper->gstObjectUnref(audioDecSinkPeerPad);
    m_gstWrapper->gstObjectUnref(audioDecSrcPeerPad);
    m_gstWrapper->gstObjectUnref(audioDecSinkPad);
    m_gstWrapper->gstObjectUnref(audioDecSrcPad);
    // Bring the replacement elements to the pipeline's state.
    m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
    m_gstWrapper->gstElementGetState(newAudioDecoder, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
    m_gstWrapper->gstElementGetState(newAudioParse, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
    m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
    return true;
}
996 :
/**
 * @brief Handles the first audio sample after a track change: reconfigures caps,
 *        flushes the audio appsrc, and performs an on-the-fly codec switch if the
 *        incoming codec (AAC vs non-AAC) differs from the current one.
 *
 * Ported from rdk_gstreamer_utils; communicates through out-parameters.
 *
 * @param pSampleAttr          opaque sample handle; nullptr means no data ready yet.
 * @param pAudioAttr           attributes of the new track; nullptr drops the sample.
 * @param pStatus              in: status of the sample fetch (0 == OK).
 * @param pui32Delay           out: delay to wait before retrying, in ms.
 * @param pAudioChangeTargetPts out: target PTS for the audio change, advanced by the
 *                             measured reconfiguration time plus a fixed gap threshold.
 * @param pcurrentDispPts      in: current display PTS.
 * @param audioChangeStage     out: set to the "align" stage constant on success.
 * @param appsrcCaps           in/out: current appsrc caps; replaced with newly built caps.
 * @param audioaac             in/out: whether the configured codec is AAC.
 * @param svpenabled           whether secure video path is enabled (forwarded to caps setup).
 * @param aSrc                 the audio appsrc element receiving flush events and caps.
 * @param ret                  out: false when the caller should retry/drop, true on success.
 * @return false only on a null required parameter; true otherwise (check *ret).
 */
bool GstGenericPlayer::performAudioTrackCodecChannelSwitch(const void *pSampleAttr,
                                                           firebolt::rialto::wrappers::AudioAttributesPrivate *pAudioAttr,
                                                           uint32_t *pStatus, unsigned int *pui32Delay,
                                                           long long *pAudioChangeTargetPts, // NOLINT(runtime/int)
                                                           const long long *pcurrentDispPts, // NOLINT(runtime/int)
                                                           unsigned int *audioChangeStage, GstCaps **appsrcCaps,
                                                           bool *audioaac, bool svpenabled, GstElement *aSrc, bool *ret)
{
    // this function comes from rdk_gstreamer_utils
    if (!pStatus || !pui32Delay || !pAudioChangeTargetPts || !pcurrentDispPts || !audioChangeStage || !appsrcCaps ||
        !audioaac || !aSrc || !ret)
    {
        RIALTO_SERVER_LOG_ERROR("performAudioTrackCodecChannelSwitch: invalid null parameter");
        return false;
    }

    constexpr uint32_t kOk = 0;
    constexpr uint32_t kWaitWhileIdling = 100;
    constexpr int kAudioChangeGapThresholdMS = 40;
    constexpr unsigned int kAudchgAlign = 3;

    struct timespec ts, now;
    unsigned int reconfigDelayMs;
    // Timestamp the start so the reconfiguration delay can be measured below.
    clock_gettime(CLOCK_MONOTONIC, &ts);
    if (*pStatus != kOk || pSampleAttr == nullptr)
    {
        // Nothing to process yet: tell the caller to poll again after a short delay.
        RIALTO_SERVER_LOG_DEBUG("No audio data ready yet");
        *pui32Delay = kWaitWhileIdling;
        *ret = false;
        return true;
    }
    RIALTO_SERVER_LOG_DEBUG("Received first audio packet after a flush, PTS");
    if (pAudioAttr)
    {
        // AAC is detected via the "mp4a" fourcc in the codec parameter string.
        const char *pCodecStr = pAudioAttr->m_codecParam.c_str();
        const char *pCodecAcc = strstr(pCodecStr, "mp4a");
        bool isAudioAAC = (pCodecAcc) ? true : false;
        bool isCodecSwitch = false;
        RIALTO_SERVER_LOG_DEBUG("Audio Attribute format %s channel %d samp %d, bitrate %d blockAlignment %d", pCodecStr,
                                pAudioAttr->m_numberOfChannels, pAudioAttr->m_samplesPerSecond, pAudioAttr->m_bitrate,
                                pAudioAttr->m_blockAlignment);
        *pAudioChangeTargetPts = *pcurrentDispPts;
        *audioChangeStage = kAudchgAlign;
        // Drop the previous caps; configAudioCap() builds the replacement below.
        if (*appsrcCaps)
        {
            m_gstWrapper->gstCapsUnref(*appsrcCaps);
            *appsrcCaps = NULL;
        }
        if (isAudioAAC != *audioaac)
            isCodecSwitch = true;
        configAudioCap(pAudioAttr, audioaac, svpenabled, appsrcCaps);
        {
            // Flush the appsrc so the new configuration starts from a clean state.
            gboolean sendRet = FALSE;
            GstEvent *flushStart = NULL;
            GstEvent *flushStop = NULL;
            flushStart = m_gstWrapper->gstEventNewFlushStart();
            sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStart);
            if (!sendRet)
                RIALTO_SERVER_LOG_DEBUG("failed to send flush-start event");
            flushStop = m_gstWrapper->gstEventNewFlushStop(TRUE);
            sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStop);
            if (!sendRet)
                RIALTO_SERVER_LOG_DEBUG("failed to send flush-stop event");
        }
        if (!isCodecSwitch)
        {
            // Same codec: only the caps need updating.
            m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
        }
        else
        {
            // Codec changed: pause audio, rebuild the parser/decoder, then resume.
            RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH mAudioAAC = %d", *audioaac);
            haltAudioPlayback();
            if (switchAudioCodec(*audioaac, *appsrcCaps) == false)
            {
                RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH FAILED switchAudioCodec mAudioAAC = %d", *audioaac);
            }
            m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
            resumeAudioPlayback();
        }
        clock_gettime(CLOCK_MONOTONIC, &now);
        // NOTE(review): this nanosecond-only delta assumes the reconfiguration took
        // less than one second (tv_sec is ignored) — confirm that holds in practice.
        reconfigDelayMs = now.tv_nsec > ts.tv_nsec ? (now.tv_nsec - ts.tv_nsec) / 1000000
                                                   : (1000 - (ts.tv_nsec - now.tv_nsec) / 1000000);
        (*pAudioChangeTargetPts) += (reconfigDelayMs + kAudioChangeGapThresholdMS);
    }
    else
    {
        // Sample without attributes: drop it and report not-done to the caller.
        RIALTO_SERVER_LOG_DEBUG("first audio after change no attribute drop!");
        *pui32Delay = 0;
        *ret = false;
        return true;
    }
    *ret = true;
    return true;
}
1091 :
1092 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
1093 : {
1094 1 : if (!m_workerThread)
1095 0 : return false;
1096 :
1097 2 : m_workerThread->enqueueTask(
1098 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
1099 1 : return true;
1100 : }
1101 :
1102 1 : bool GstGenericPlayer::setReportDecodeErrors(const MediaSourceType &mediaSourceType, bool reportDecodeErrors)
1103 : {
1104 1 : if (!m_workerThread)
1105 0 : return false;
1106 :
1107 2 : m_workerThread->enqueueTask(
1108 2 : m_taskFactory->createSetReportDecodeErrors(m_context, *this, mediaSourceType, reportDecodeErrors));
1109 1 : return true;
1110 : }
1111 :
1112 2 : bool GstGenericPlayer::getQueuedFrames(uint32_t &queuedFrames)
1113 : {
1114 2 : bool returnValue{false};
1115 2 : GstElement *decoder{getDecoder(MediaSourceType::VIDEO)};
1116 2 : if (decoder)
1117 : {
1118 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "queued-frames"))
1119 : {
1120 1 : m_glibWrapper->gObjectGet(decoder, "queued-frames", &queuedFrames, nullptr);
1121 1 : returnValue = true;
1122 : }
1123 : else
1124 : {
1125 1 : RIALTO_SERVER_LOG_ERROR("queued-frames not supported in element %s", GST_ELEMENT_NAME(decoder));
1126 : }
1127 2 : m_gstWrapper->gstObjectUnref(decoder);
1128 : }
1129 : else
1130 : {
1131 0 : RIALTO_SERVER_LOG_ERROR("Failed to get queued-frames property, decoder is NULL");
1132 : }
1133 :
1134 2 : return returnValue;
1135 : }
1136 :
1137 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
1138 : {
1139 5 : bool returnValue{false};
1140 5 : GstElement *sink{getSink(mediaSourceType)};
1141 5 : if (sink)
1142 : {
1143 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1144 : {
1145 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
1146 2 : returnValue = true;
1147 : }
1148 : else
1149 : {
1150 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
1151 : }
1152 3 : m_gstWrapper->gstObjectUnref(sink);
1153 : }
1154 : else
1155 : {
1156 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
1157 : }
1158 :
1159 5 : return returnValue;
1160 : }
1161 :
1162 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
1163 : {
1164 5 : bool returnValue{false};
1165 5 : GstElement *sink{getSink(mediaSourceType)};
1166 5 : if (sink)
1167 : {
1168 3 : GstStructure *stats{nullptr};
1169 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
1170 3 : if (!stats)
1171 : {
1172 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
1173 : }
1174 : else
1175 : {
1176 : guint64 renderedFramesTmp;
1177 : guint64 droppedFramesTmp;
1178 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
1179 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
1180 : {
1181 1 : renderedFrames = renderedFramesTmp;
1182 1 : droppedFrames = droppedFramesTmp;
1183 1 : returnValue = true;
1184 : }
1185 : else
1186 : {
1187 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
1188 : GST_ELEMENT_NAME(sink));
1189 : }
1190 2 : m_gstWrapper->gstStructureFree(stats);
1191 : }
1192 3 : m_gstWrapper->gstObjectUnref(sink);
1193 : }
1194 : else
1195 : {
1196 2 : RIALTO_SERVER_LOG_ERROR("Failed to get stats, sink is NULL");
1197 : }
1198 :
1199 5 : return returnValue;
1200 : }
1201 :
/**
 * @brief Creates a GstBuffer holding the segment's payload and, for encrypted
 *        segments, attaches Rialto protection metadata (key id, IV, subsamples,
 *        cipher mode, encryption pattern).
 *
 * On success the protection metadata takes ownership of the auxiliary buffers;
 * when attaching fails they are unreffed here instead.
 *
 * @param mediaSegment segment whose data and (optional) crypto info is wrapped.
 * @return a new GstBuffer with PTS/duration set from the segment; caller owns it.
 */
GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
{
    GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
    m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());

    if (mediaSegment.isEncrypted())
    {
        // Key id and initialisation vector are copied into dedicated buffers.
        GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
        m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());

        GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
        m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
                                    mediaSegment.getInitVector().size());
        GstBuffer *subsamples{nullptr};
        if (!mediaSegment.getSubSamples().empty())
        {
            // Serialise subsamples as big-endian (u16 clear bytes, u32 encrypted bytes) pairs.
            auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
            guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
            GstByteWriter writer;
            m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);

            for (const auto &subSample : mediaSegment.getSubSamples())
            {
                m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
                m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
            }
            // Wrapped buffer takes ownership of the malloc'd serialisation.
            subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
        }

        uint32_t crypt = 0;
        uint32_t skip = 0;
        bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);

        GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
                                        static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
                                        mediaSegment.getInitWithLast15(),
                                        keyId,
                                        initVector,
                                        subsamples,
                                        mediaSegment.getCipherMode(),
                                        crypt,
                                        skip,
                                        encryptionPatternSet,
                                        m_context.decryptionService};

        if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
        {
            // Attach failed: metadata did not take ownership, so release the buffers here.
            RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
            if (keyId)
            {
                m_gstWrapper->gstBufferUnref(keyId);
            }
            if (initVector)
            {
                m_gstWrapper->gstBufferUnref(initVector);
            }
            if (subsamples)
            {
                m_gstWrapper->gstBufferUnref(subsamples);
            }
        }
    }

    GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
    GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
    return gstBuffer;
}
1269 :
1270 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
1271 : {
1272 4 : auto elem = m_context.streamInfo.find(mediaSource);
1273 4 : if (elem != m_context.streamInfo.end())
1274 : {
1275 2 : StreamInfo &streamInfo = elem->second;
1276 2 : streamInfo.isNeedDataPending = false;
1277 :
1278 : // Send new NeedMediaData if we still need it
1279 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
1280 : {
1281 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
1282 : }
1283 : }
1284 : else
1285 : {
1286 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
1287 : }
1288 4 : }
1289 :
/**
 * @brief Pushes all queued buffers for the given stream into its appsrc, sending an
 *        initial segment/position first when one is pending, and raises the BUFFERED
 *        network-state notification once every stream has pushed data.
 *
 * @param mediaType stream whose queued buffers should be attached.
 */
void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
{
    auto elem = m_context.streamInfo.find(mediaType);
    if (elem != m_context.streamInfo.end())
    {
        StreamInfo &streamInfo = elem->second;
        // Nothing to do when no buffers are queued or the pipeline stopped asking for data.
        if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
        {
            return;
        }

        // Subtitles set an initial position on the sink; A/V push an initial segment sample.
        if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
        {
            setTextTrackPositionIfRequired(streamInfo.appSrc);
        }
        else
        {
            pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
        }
        if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
        {
            // This needs to be done before gstAppSrcPushBuffer() is
            // called because it can free the memory
            m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
        }

        // Hand every queued buffer to the appsrc (ownership transfers to GStreamer).
        for (GstBuffer *buffer : streamInfo.buffers)
        {
            m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
        }
        streamInfo.buffers.clear();
        streamInfo.isDataPushed = true;

        // Report BUFFERED once: either this is the only stream, or all streams have pushed.
        const bool kIsSingle = m_context.streamInfo.size() == 1;
        bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
                                                 [](const auto &entry) { return entry.second.isDataPushed; });

        if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
        {
            m_context.bufferedNotificationSent = true;
            m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
            RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
        }
        cancelUnderflow(mediaType);

        // Deliver a deferred end-of-stream now that the remaining data has been pushed.
        const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
        if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
        {
            setEos(mediaType);
        }
    }
}
1342 :
1343 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
1344 : {
1345 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
1346 7 : if (elem != m_context.streamInfo.end())
1347 : {
1348 6 : StreamInfo &streamInfo = elem->second;
1349 :
1350 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
1351 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1352 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1353 :
1354 6 : if (rate != kInvalidRate)
1355 : {
1356 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
1357 : }
1358 :
1359 6 : if (channels != kInvalidChannels)
1360 : {
1361 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
1362 : }
1363 :
1364 6 : setCodecData(newCaps, codecData);
1365 :
1366 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1367 : {
1368 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1369 : }
1370 :
1371 6 : m_gstWrapper->gstCapsUnref(newCaps);
1372 6 : m_gstWrapper->gstCapsUnref(currentCaps);
1373 : }
1374 7 : }
1375 :
1376 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
1377 : const std::shared_ptr<CodecData> &codecData)
1378 : {
1379 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
1380 8 : if (elem != m_context.streamInfo.end())
1381 : {
1382 7 : StreamInfo &streamInfo = elem->second;
1383 :
1384 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1385 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1386 :
1387 7 : if (width > 0)
1388 : {
1389 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
1390 : }
1391 :
1392 7 : if (height > 0)
1393 : {
1394 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
1395 : }
1396 :
1397 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
1398 : {
1399 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
1400 : frameRate.denominator, NULL);
1401 : }
1402 :
1403 7 : setCodecData(newCaps, codecData);
1404 :
1405 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1406 : {
1407 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1408 : }
1409 :
1410 7 : m_gstWrapper->gstCapsUnref(currentCaps);
1411 7 : m_gstWrapper->gstCapsUnref(newCaps);
1412 : }
1413 8 : }
1414 :
1415 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
1416 : {
1417 5 : if (clippingStart || clippingEnd)
1418 : {
1419 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
1420 : {
1421 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
1422 : clippingStart, clippingEnd);
1423 : }
1424 : else
1425 : {
1426 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
1427 : buffer, clippingStart, clippingEnd);
1428 : }
1429 : }
1430 5 : }
1431 :
1432 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
1433 : {
1434 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
1435 : {
1436 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
1437 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
1438 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
1439 7 : m_gstWrapper->gstBufferUnref(buf);
1440 7 : return true;
1441 : }
1442 6 : if (codecData && CodecDataType::STRING == codecData->type)
1443 : {
1444 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
1445 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
1446 2 : return true;
1447 : }
1448 4 : return false;
1449 : }
1450 :
/**
 * @brief Pushes pending initial segment(s) for the source into its appsrc before the
 *        first data buffers, so the pipeline starts reporting position correctly.
 *
 * On success the last pushed segment is remembered as the source's current position
 * and the pending entry is removed; on a seek failure the entry is dropped instead.
 *
 * @param source  the appsrc element for the stream.
 * @param typeStr human-readable media type name, used for logging only.
 */
void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
{
    auto initialPosition = m_context.initialPositions.find(source);
    if (m_context.initialPositions.end() == initialPosition)
    {
        // Sending initial sample not needed
        return;
    }
    // GstAppSrc does not replace segment, if it's the same as previous one.
    // It causes problems with position reporting in amlogic devices, so we need to push
    // two segments with different reset time value.
    pushAdditionalSegmentIfRequired(source);

    for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
    {
        GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
        RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
        GstSegment *segment{m_gstWrapper->gstSegmentNew()};
        m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
        if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
                                            GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
        {
            // Seek failed: free this segment, drop all pending entries and give up.
            RIALTO_SERVER_LOG_WARN("Segment seek failed.");
            m_gstWrapper->gstSegmentFree(segment);
            m_context.initialPositions.erase(initialPosition);
            return;
        }
        segment->applied_rate = appliedRate;
        RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
                              "], rate: %f, appliedRate %f, reset_time: %d\n",
                              typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
                              segment->rate, segment->applied_rate, resetTime);

        GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
        // We can't pass buffer in GstSample, because implementation of gst_app_src_push_sample
        // uses gst_buffer_copy, which loses RialtoProtectionMeta (that causes problems with EME
        // for first frame).
        GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
        m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
        m_gstWrapper->gstSampleUnref(sample);
        m_gstWrapper->gstCapsUnref(currentCaps);

        m_gstWrapper->gstSegmentFree(segment);
    }
    // Remember the last segment as the new current position for this source.
    m_context.currentPosition[source] = initialPosition->second.back();
    m_context.initialPositions.erase(initialPosition);
    return;
}
1499 :
1500 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
1501 : {
1502 5 : auto currentPosition = m_context.currentPosition.find(source);
1503 5 : if (m_context.currentPosition.end() == currentPosition)
1504 : {
1505 4 : return;
1506 : }
1507 1 : auto initialPosition = m_context.initialPositions.find(source);
1508 1 : if (m_context.initialPositions.end() == initialPosition)
1509 : {
1510 0 : return;
1511 : }
1512 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1513 1 : currentPosition->second == initialPosition->second.back())
1514 : {
1515 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1516 1 : SegmentData additionalSegment = initialPosition->second.back();
1517 1 : additionalSegment.resetTime = false;
1518 1 : initialPosition->second.push_back(additionalSegment);
1519 : }
1520 : }
1521 :
1522 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1523 : {
1524 2 : auto initialPosition = m_context.initialPositions.find(source);
1525 2 : if (m_context.initialPositions.end() == initialPosition)
1526 : {
1527 : // Sending initial sample not needed
1528 1 : return;
1529 : }
1530 :
1531 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1532 : GST_TIME_ARGS(initialPosition->second.back().position));
1533 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1534 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1535 :
1536 1 : m_context.initialPositions.erase(initialPosition);
1537 : }
1538 :
/**
 * @brief Re-attaches an already known media source (audio track switch).
 *
 * The source type must have been attached before and must carry a non-empty mime type.
 * When the new caps differ from the caps currently set on the app source, an audio
 * track codec/channel switch is performed via rdk_gstreamer_utils (or a local copy of
 * that routine for amlhalasink). Returns false only for the early validation failures;
 * a failed codec switch is logged but still returns true.
 */
bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
{
    if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
    {
        RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
        return false;
    }
    if (source->getMimeType().empty())
    {
        RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
        return false;
    }
    std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
    if (!audioAttributes)
    {
        RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
        return false;
    }

    long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
    GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
    GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
    GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);

    if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
    {
        RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");

        GstElement *sink = getSink(MediaSourceType::AUDIO);
        if (!sink)
        {
            RIALTO_SERVER_LOG_ERROR("Failed to get audio sink");
            // Release both caps refs before bailing out.
            if (caps)
                m_gstWrapper->gstCapsUnref(caps);
            if (oldCaps)
                m_gstWrapper->gstCapsUnref(oldCaps);
            return false;
        }
        // Only the name is needed; drop the sink ref immediately.
        std::string sinkName = GST_ELEMENT_NAME(sink);
        m_gstWrapper->gstObjectUnref(sink);

        int sampleAttributes{
            0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
        std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
        unsigned int ui32Delay{0}; // output param
        long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
                                            // currentDispPts in rdk_gstreamer_utils function stub
        unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
        gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
        std::string oldCapsStr = std::string(oldCapsCStr);
        m_glibWrapper->gFree(oldCapsCStr);
        bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
        bool svpEnabled{true}; // assume always true
        bool retVal{false};    // Output param. Set to TRUE in rdk_gstreamer_utils function stub

        bool result = false;
        if (m_glibWrapper->gStrHasPrefix(sinkName.c_str(), "amlhalasink"))
        {
            // due to problems audio codec change in prerolling, temporarily moved the code from rdk gstreamer utils to
            // Rialto and applied fixes
            result = performAudioTrackCodecChannelSwitch(&sampleAttributes, &(*audioAttributes), &status, &ui32Delay,
                                                         &audioChangeTargetPts, &currentDispPts, &audioChangeStage,
                                                         &caps, &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
        }
        else
        {
            result = m_rdkGstreamerUtilsWrapper->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup,
                                                                                     &sampleAttributes,
                                                                                     &(*audioAttributes), &status,
                                                                                     &ui32Delay, &audioChangeTargetPts,
                                                                                     &currentDispPts, &audioChangeStage,
                                                                                     &caps, &audioAac, svpEnabled,
                                                                                     GST_ELEMENT(appSrc), &retVal);
        }

        // A failed switch is reported but deliberately not treated as a hard error.
        if (!result || !retVal)
        {
            RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
        }
    }
    else
    {
        RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
    }

    m_context.lastAudioSampleTimestamps = currentDispPts;
    if (caps)
        m_gstWrapper->gstCapsUnref(caps);
    if (oldCaps)
        m_gstWrapper->gstCapsUnref(oldCaps);

    return true;
}
1632 :
1633 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1634 : {
1635 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1636 : }
1637 :
1638 91 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1639 : {
1640 91 : if (m_workerThread)
1641 : {
1642 91 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1643 : }
1644 : }
1645 :
1646 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1647 : {
1648 1 : if (m_workerThread)
1649 : {
1650 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1651 : }
1652 : }
1653 :
1654 2 : void GstGenericPlayer::scheduleAudioUnderflow()
1655 : {
1656 2 : if (m_workerThread)
1657 : {
1658 2 : bool underflowEnabled = m_context.isPlaying;
1659 4 : m_workerThread->enqueueTask(
1660 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1661 : }
1662 2 : }
1663 :
1664 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1665 : {
1666 2 : if (m_workerThread)
1667 : {
1668 2 : bool underflowEnabled = m_context.isPlaying;
1669 4 : m_workerThread->enqueueTask(
1670 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1671 : }
1672 2 : }
1673 :
void GstGenericPlayer::scheduleAllSourcesAttached()
{
    // Forwards directly (no worker-thread enqueue, unlike the other schedule* methods).
    allSourcesAttached();
}
1678 :
1679 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1680 : {
1681 14 : auto elem = m_context.streamInfo.find(mediaSource);
1682 14 : if (elem != m_context.streamInfo.end())
1683 : {
1684 14 : StreamInfo &streamInfo = elem->second;
1685 14 : if (!streamInfo.underflowOccured)
1686 : {
1687 11 : return;
1688 : }
1689 :
1690 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1691 3 : streamInfo.underflowOccured = false;
1692 : }
1693 : }
1694 :
/**
 * @brief Requests a transition to the PLAYING state.
 *
 * @param[out] async set to true when the state change completes asynchronously and the
 *                   caller has to wait for the corresponding state-changed notification.
 */
void GstGenericPlayer::play(bool &async)
{
    if (0 == m_ongoingStateChangesNumber)
    {
        // Operation called on main thread, because PAUSED->PLAYING change is synchronous and needs to be done fast.
        //
        // m_context.pipeline can be used, because it's modified only in GstGenericPlayer
        // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
        ++m_ongoingStateChangesNumber;
        async = (changePipelineState(GST_STATE_PLAYING) == GST_STATE_CHANGE_ASYNC);
        RIALTO_SERVER_LOG_MIL("State change to PLAYING requested");
    }
    else
    {
        // Another state change is already in flight, so serialise this request by
        // enqueueing it on the worker thread instead; report it as asynchronous.
        ++m_ongoingStateChangesNumber;
        async = true;
        if (m_workerThread)
        {
            m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
        }
    }
}
1717 :
1718 2 : void GstGenericPlayer::pause()
1719 : {
1720 2 : ++m_ongoingStateChangesNumber;
1721 2 : if (m_workerThread)
1722 : {
1723 2 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1724 : }
1725 : }
1726 :
1727 1 : void GstGenericPlayer::stop()
1728 : {
1729 1 : ++m_ongoingStateChangesNumber;
1730 1 : if (m_workerThread)
1731 : {
1732 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1733 : }
1734 : }
1735 :
/**
 * @brief Performs the actual pipeline state change and bookkeeping.
 *
 * Notifies the client of FAILURE when the pipeline is missing or GStreamer rejects
 * the change. Decrements m_ongoingStateChangesNumber (incremented by the caller)
 * on every exit path.
 *
 * @param newState target GStreamer state.
 * @return result of gst_element_set_state, or GST_STATE_CHANGE_FAILURE when there is no pipeline.
 */
GstStateChangeReturn GstGenericPlayer::changePipelineState(GstState newState)
{
    if (!m_context.pipeline)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
        --m_ongoingStateChangesNumber;
        return GST_STATE_CHANGE_FAILURE;
    }
    // Let the flush-on-preroll controller know which state we are heading for.
    m_context.flushOnPrerollController->setTargetState(newState);
    const GstStateChangeReturn result{m_gstWrapper->gstElementSetState(m_context.pipeline, newState)};
    if (result == GST_STATE_CHANGE_FAILURE)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
    }
    --m_ongoingStateChangesNumber;
    return result;
}
1757 :
/**
 * @brief Queries the current playback position.
 *
 * Returns -1 when the element is null, still prerolling / below PAUSED,
 * or when the position query fails.
 *
 * @param element element whose state gates the query.
 * @return position in nanoseconds, or -1 on failure.
 */
int64_t GstGenericPlayer::getPosition(GstElement *element)
{
    if (!element)
    {
        RIALTO_SERVER_LOG_WARN("Element is null");
        return -1;
    }

    // Inspect the element's state fields under the state lock to avoid racing
    // with an in-progress state change.
    m_gstWrapper->gstStateLock(element);

    if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
        (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
         m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
    {
        RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
                               m_gstWrapper->gstElementStateChangeReturnGetName(
                                   m_gstWrapper->gstElementGetStateReturn(element)),
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));

        m_gstWrapper->gstStateUnlock(element);
        return -1;
    }
    m_gstWrapper->gstStateUnlock(element);

    gint64 position = -1;
    // NOTE(review): the position is queried on m_context.pipeline even though the state
    // checks above use 'element' — presumably intentional since callers pass the pipeline;
    // confirm if an element other than the pipeline is ever passed here.
    if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
    {
        RIALTO_SERVER_LOG_WARN("Failed to query position");
        return -1;
    }

    return position;
}
1792 :
1793 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1794 : {
1795 1 : if (m_workerThread)
1796 : {
1797 2 : m_workerThread->enqueueTask(
1798 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1799 : }
1800 1 : }
1801 :
1802 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1803 : {
1804 1 : if (m_workerThread)
1805 : {
1806 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1807 : }
1808 : }
1809 :
1810 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1811 : {
1812 4 : bool result = false;
1813 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1814 4 : if (videoSink)
1815 : {
1816 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1817 : {
1818 : std::string rect =
1819 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1820 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1821 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1822 2 : m_context.pendingGeometry.clear();
1823 2 : result = true;
1824 : }
1825 : else
1826 : {
1827 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1828 : }
1829 3 : m_gstWrapper->gstObjectUnref(videoSink);
1830 : }
1831 : else
1832 : {
1833 1 : RIALTO_SERVER_LOG_ERROR("Failed to set video rectangle, sink is NULL");
1834 : }
1835 :
1836 4 : return result;
1837 : }
1838 :
/**
 * @brief Applies a pending "immediate-output" setting to the video sink.
 *
 * @return true when the property was found and set; false when nothing is pending,
 *         the sink is not yet available (value stays pending), or the sink lacks
 *         the property (pending value is dropped).
 */
bool GstGenericPlayer::setImmediateOutput()
{
    bool result{false};
    if (m_context.pendingImmediateOutputForVideo.has_value())
    {
        GstElement *sink{getSink(MediaSourceType::VIDEO)};
        if (sink)
        {
            bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
            RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");

            if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
            {
                gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
                m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
                result = true;
            }
            else
            {
                RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
            }
            // The pending value is consumed whether or not the property existed.
            m_context.pendingImmediateOutputForVideo.reset();
            m_gstWrapper->gstObjectUnref(sink);
        }
        else
        {
            // No sink yet; keep the value pending so a later call can retry.
            RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
        }
    }
    return result;
}
1870 :
/**
 * @brief Applies a pending "report-decode-errors" setting to the video decoder.
 *
 * The pending value is read and cleared under m_context.propertyMutex; the decoder
 * interaction happens outside the lock. When no decoder is available yet, the value
 * stays pending for a later retry.
 *
 * @return true when the property was found and set; false otherwise.
 */
bool GstGenericPlayer::setReportDecodeErrors()
{
    bool result{false};
    bool reportDecodeErrors{false};

    {
        // Snapshot the pending value under the lock, then release before touching GStreamer.
        std::unique_lock lock{m_context.propertyMutex};
        if (!m_context.pendingReportDecodeErrorsForVideo.has_value())
        {
            return false;
        }
        reportDecodeErrors = m_context.pendingReportDecodeErrorsForVideo.value();
    }

    GstElement *decoder = getDecoder(MediaSourceType::VIDEO);
    if (decoder)
    {
        RIALTO_SERVER_LOG_DEBUG("Set report decode errors to %s", reportDecodeErrors ? "TRUE" : "FALSE");

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "report-decode-errors"))
        {
            gboolean reportDecodeErrorsGboolean{reportDecodeErrors ? TRUE : FALSE};
            m_glibWrapper->gObjectSet(decoder, "report-decode-errors", reportDecodeErrorsGboolean, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set report-decode-errors property on decoder '%s'",
                                    GST_ELEMENT_NAME(decoder));
        }

        m_gstWrapper->gstObjectUnref(decoder);

        {
            // Consume the pending value (re-acquire lock only for the reset).
            std::unique_lock lock{m_context.propertyMutex};
            m_context.pendingReportDecodeErrorsForVideo.reset();
        }
    }
    else
    {
        // No decoder yet; keep the value pending so a later call can retry.
        RIALTO_SERVER_LOG_DEBUG("Pending report-decode-errors, decoder is NULL");
    }
    return result;
}
1915 :
1916 4 : bool GstGenericPlayer::setShowVideoWindow()
1917 : {
1918 4 : if (!m_context.pendingShowVideoWindow.has_value())
1919 : {
1920 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1921 1 : return false;
1922 : }
1923 :
1924 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1925 3 : if (!videoSink)
1926 : {
1927 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1928 1 : return false;
1929 : }
1930 2 : bool result{false};
1931 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1932 : {
1933 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1934 1 : result = true;
1935 : }
1936 : else
1937 : {
1938 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1939 : }
1940 2 : m_context.pendingShowVideoWindow.reset();
1941 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1942 2 : return result;
1943 : }
1944 :
/**
 * @brief Applies a pending "low-latency" setting to the audio sink.
 *
 * @return true when the property was found and set; false when nothing is pending,
 *         the sink is not yet available (value stays pending), or the sink lacks
 *         the property (pending value is dropped).
 */
bool GstGenericPlayer::setLowLatency()
{
    bool result{false};
    if (m_context.pendingLowLatency.has_value())
    {
        GstElement *sink{getSink(MediaSourceType::AUDIO)};
        if (sink)
        {
            bool lowLatency{m_context.pendingLowLatency.value()};
            RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");

            if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
            {
                gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
                m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
                result = true;
            }
            else
            {
                RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
            }
            // The pending value is consumed whether or not the property existed.
            m_context.pendingLowLatency.reset();
            m_gstWrapper->gstObjectUnref(sink);
        }
        else
        {
            // No sink yet; keep the value pending so a later call can retry.
            RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
        }
    }
    return result;
}
1976 :
1977 3 : bool GstGenericPlayer::setSync()
1978 : {
1979 3 : bool result{false};
1980 3 : if (m_context.pendingSync.has_value())
1981 : {
1982 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1983 3 : if (sink)
1984 : {
1985 2 : bool sync{m_context.pendingSync.value()};
1986 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1987 :
1988 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1989 : {
1990 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1991 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1992 1 : result = true;
1993 : }
1994 : else
1995 : {
1996 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1997 : }
1998 2 : m_context.pendingSync.reset();
1999 2 : m_gstWrapper->gstObjectUnref(sink);
2000 : }
2001 : else
2002 : {
2003 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
2004 : }
2005 : }
2006 3 : return result;
2007 : }
2008 :
2009 3 : bool GstGenericPlayer::setSyncOff()
2010 : {
2011 3 : bool result{false};
2012 3 : if (m_context.pendingSyncOff.has_value())
2013 : {
2014 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2015 3 : if (decoder)
2016 : {
2017 2 : bool syncOff{m_context.pendingSyncOff.value()};
2018 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
2019 :
2020 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
2021 : {
2022 1 : gboolean syncOffGboolean{decoder ? TRUE : FALSE};
2023 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
2024 1 : result = true;
2025 : }
2026 : else
2027 : {
2028 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
2029 : }
2030 2 : m_context.pendingSyncOff.reset();
2031 2 : m_gstWrapper->gstObjectUnref(decoder);
2032 : }
2033 : else
2034 : {
2035 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
2036 : }
2037 : }
2038 3 : return result;
2039 : }
2040 :
/**
 * @brief Applies a pending stream-sync-mode setting for the given source type.
 *
 * For AUDIO the value is written to the decoder's "stream-sync-mode" property;
 * for VIDEO it is written to the parser's "syncmode-streaming" property. The
 * pending value is read and erased under m_context.propertyMutex. When the
 * target element is not yet available the value stays pending for a retry.
 *
 * @param type source type (AUDIO or VIDEO).
 * @return true when the property was found and set; false otherwise.
 */
bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
{
    bool result{false};
    int32_t streamSyncMode{0};
    {
        // Snapshot the pending value under the lock, then release before touching GStreamer.
        std::unique_lock lock{m_context.propertyMutex};
        if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
        {
            return false;
        }
        streamSyncMode = m_context.pendingStreamSyncMode[type];
    }
    if (MediaSourceType::AUDIO == type)
    {
        GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
        if (!decoder)
        {
            // No decoder yet; keep the value pending so a later call can retry.
            RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
            return false;
        }

        RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
        {
            gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
            m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Consume the pending value (re-acquire the lock only for the erase).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    else if (MediaSourceType::VIDEO == type)
    {
        GstElement *parser = getParser(MediaSourceType::VIDEO);
        if (!parser)
        {
            // No parser yet; keep the value pending so a later call can retry.
            RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
            return false;
        }

        gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
        RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
        {
            m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
        }
        m_gstWrapper->gstObjectUnref(parser);
        // Consume the pending value (re-acquire the lock only for the erase).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    return result;
}
2105 :
/**
 * @brief Renders a single prerolled frame when a render-frame request is pending.
 *
 * Enables "frame-step-on-preroll" on the video sink, sends a one-buffer step event
 * and disables the property again. When no sink is available yet the request stays
 * pending; when the sink lacks the property the request is still consumed.
 *
 * @return true when the frame step was issued; false otherwise.
 */
bool GstGenericPlayer::setRenderFrame()
{
    bool result{false};
    if (m_context.pendingRenderFrame)
    {
        static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
        GstElement *sink{getSink(MediaSourceType::VIDEO)};
        if (sink)
        {
            if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
            {
                RIALTO_SERVER_LOG_INFO("Rendering preroll");

                // Enable stepping, step exactly one buffer, then disable again.
                m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
                m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
                                                                                      false));
                m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
                result = true;
            }
            else
            {
                RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
            }
            m_gstWrapper->gstObjectUnref(sink);
            // The request is consumed whether or not the property existed.
            m_context.pendingRenderFrame = false;
        }
        else
        {
            // No sink yet; keep the request pending so a later call can retry.
            RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
        }
    }
    return result;
}
2139 :
/**
 * @brief Applies a pending "limit-buffering-ms" setting to the audio decoder.
 *
 * The pending value is read and cleared under m_context.propertyMutex; the decoder
 * interaction happens outside the lock. When no decoder is available yet the value
 * stays pending for a later retry.
 *
 * @return true when the property was found and set; false otherwise.
 */
bool GstGenericPlayer::setBufferingLimit()
{
    bool result{false};
    guint bufferingLimit{0};
    {
        // Snapshot the pending value under the lock, then release before touching GStreamer.
        std::unique_lock lock{m_context.propertyMutex};
        if (!m_context.pendingBufferingLimit.has_value())
        {
            return false;
        }
        bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
    }

    GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
    if (decoder)
    {
        RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
        {
            m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
                                    GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Consume the pending value (re-acquire the lock only for the reset).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingBufferingLimit.reset();
    }
    else
    {
        // No decoder yet; keep the value pending so a later call can retry.
        RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
    }
    return result;
}
2178 :
2179 2 : bool GstGenericPlayer::setUseBuffering()
2180 : {
2181 2 : std::unique_lock lock{m_context.propertyMutex};
2182 2 : if (m_context.pendingUseBuffering.has_value())
2183 : {
2184 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2185 : {
2186 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
2187 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
2188 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
2189 : useBufferingGboolean, nullptr);
2190 1 : m_context.pendingUseBuffering.reset();
2191 1 : return true;
2192 : }
2193 : else
2194 : {
2195 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
2196 : }
2197 : }
2198 1 : return false;
2199 2 : }
2200 :
2201 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
2202 : {
2203 8 : bool result = false;
2204 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
2205 8 : if (factory)
2206 : {
2207 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
2208 7 : if (videoSink)
2209 : {
2210 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
2211 : {
2212 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
2213 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
2214 4 : result = true;
2215 : }
2216 : else
2217 : {
2218 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
2219 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
2220 : }
2221 : }
2222 : else
2223 : {
2224 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
2225 : }
2226 :
2227 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2228 : }
2229 : else
2230 : {
2231 : // No westeros sink
2232 1 : result = true;
2233 : }
2234 :
2235 8 : return result;
2236 : }
2237 :
2238 8 : bool GstGenericPlayer::setErmContext()
2239 : {
2240 8 : bool result = false;
2241 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
2242 8 : if (context)
2243 : {
2244 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
2245 6 : if (contextStructure)
2246 : {
2247 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
2248 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
2249 5 : result = true;
2250 : }
2251 : else
2252 : {
2253 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
2254 : }
2255 6 : m_gstWrapper->gstContextUnref(context);
2256 : }
2257 : else
2258 : {
2259 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
2260 : }
2261 :
2262 8 : return result;
2263 : }
2264 :
/**
 * @brief Starts the periodic position-report / audio-underflow-check timer.
 *
 * No-op when the timer is already running. Each tick enqueues a report-position
 * and a check-audio-underflow task on the worker thread.
 */
void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
{
    if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
    {
        return;
    }

    m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
        kPositionReportTimerMs,
        [this]()
        {
            // Timer callback may fire after the worker thread is gone; guard against that.
            if (m_workerThread)
            {
                m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
                m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
            }
        },
        firebolt::rialto::common::TimerType::PERIODIC);
}
2284 :
2285 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
2286 : {
2287 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
2288 : {
2289 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
2290 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
2291 : }
2292 4 : }
2293 :
/**
 * @brief Starts the periodic playback-info notification timer.
 *
 * No-op when the timer is already running. Notifies once immediately, then
 * every kPlaybackInfoTimerMs.
 */
void GstGenericPlayer::startNotifyPlaybackInfoTimer()
{
    static constexpr std::chrono::milliseconds kPlaybackInfoTimerMs{32};
    if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
    {
        return;
    }

    // Emit one notification immediately so clients do not wait a full period for the first update.
    notifyPlaybackInfo();

    m_playbackInfoTimer =
        m_timerFactory
            ->createTimer(kPlaybackInfoTimerMs, [this]() { notifyPlaybackInfo(); }, firebolt::rialto::common::TimerType::PERIODIC);
}
2308 :
2309 3 : void GstGenericPlayer::stopNotifyPlaybackInfoTimer()
2310 : {
2311 3 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
2312 : {
2313 1 : m_playbackInfoTimer->cancel();
2314 1 : m_playbackInfoTimer.reset();
2315 : }
2316 3 : }
2317 :
/**
 * @brief Starts the periodic subtitle-clock resynchronisation timer.
 *
 * No-op when the timer is already running. Each tick enqueues a
 * synchronise-subtitle-clock task on the worker thread.
 */
void GstGenericPlayer::startSubtitleClockResyncTimer()
{
    if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
    {
        return;
    }

    m_subtitleClockResyncTimer = m_timerFactory->createTimer(
        kSubtitleClockResyncInterval,
        [this]()
        {
            // Timer callback may fire after the worker thread is gone; guard against that.
            if (m_workerThread)
            {
                m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
            }
        },
        firebolt::rialto::common::TimerType::PERIODIC);
}
2336 :
2337 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
2338 : {
2339 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
2340 : {
2341 0 : m_subtitleClockResyncTimer->cancel();
2342 0 : m_subtitleClockResyncTimer.reset();
2343 : }
2344 : }
2345 :
2346 2 : void GstGenericPlayer::stopWorkerThread()
2347 : {
2348 2 : if (m_workerThread)
2349 : {
2350 2 : m_workerThread->stop();
2351 : }
2352 : }
2353 :
void GstGenericPlayer::setPendingPlaybackRate()
{
    // Applies the playback rate that was stored while it could not be set immediately.
    RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
    setPlaybackRate(m_context.pendingPlaybackRate);
}
2359 :
2360 1 : void GstGenericPlayer::renderFrame()
2361 : {
2362 1 : if (m_workerThread)
2363 : {
2364 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
2365 : }
2366 : }
2367 :
2368 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
2369 : {
2370 18 : if (m_workerThread)
2371 : {
2372 36 : m_workerThread->enqueueTask(
2373 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
2374 : }
2375 18 : }
2376 :
/**
 * @brief Reads the current audio volume, taking an active audio fade into account.
 *
 * @param[out] currentVolume volume in the linear 0.0-1.0 range.
 * @return true on success, false when there is no pipeline.
 */
bool GstGenericPlayer::getVolume(double &currentVolume)
{
    // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
    // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
    if (!m_context.pipeline)
    {
        return false;
    }

    // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
    // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
    // The code has been written to be backwardly compatible on platforms that don't have this property.
    // The observed behaviour was:
    //     - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
    //       to find volume in the pipeline works and is used.
    //     - if the returned fade volume is positive then audio-fade is active. In this case the returned fade volume
    //       directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
    //       meaningless and doesn't contribute in this case).
    GstElement *sink{getSink(MediaSourceType::AUDIO)};
    if (m_context.audioFadeEnabled && sink &&
        m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
    {
        gint fadeVolume{-100};
        m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
        if (fadeVolume < 0)
        {
            // Fade not active - fall back to the standard stream-volume query.
            currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                                   GST_STREAM_VOLUME_FORMAT_LINEAR);
            RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
        }
        else
        {
            // Fade active - fade-volume is 0..100, scale to the linear 0.0-1.0 range.
            currentVolume = static_cast<double>(fadeVolume) / 100.0;
            RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
        }
    }
    else
    {
        currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                               GST_STREAM_VOLUME_FORMAT_LINEAR);
        RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
    }

    if (sink)
        m_gstWrapper->gstObjectUnref(sink);

    return true;
}
2425 :
2426 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
2427 : {
2428 1 : if (m_workerThread)
2429 : {
2430 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
2431 : }
2432 : }
2433 :
2434 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
2435 : {
2436 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
2437 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
2438 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
2439 : {
2440 2 : if (!m_context.subtitleSink)
2441 : {
2442 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2443 1 : return false;
2444 : }
2445 1 : gboolean muteValue{FALSE};
2446 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
2447 1 : mute = muteValue;
2448 : }
2449 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
2450 : {
2451 2 : if (!m_context.pipeline)
2452 : {
2453 1 : return false;
2454 : }
2455 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
2456 : }
2457 : else
2458 : {
2459 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
2460 1 : return false;
2461 : }
2462 :
2463 2 : return true;
2464 : }
2465 :
2466 2 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
2467 : {
2468 2 : GstElement *sink = getSink(mediaSourceType);
2469 2 : if (!sink)
2470 : {
2471 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
2472 0 : return true; // Our sinks are async by default
2473 : }
2474 2 : gboolean returnValue{TRUE};
2475 2 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
2476 2 : m_gstWrapper->gstObjectUnref(sink);
2477 2 : return returnValue == TRUE;
2478 : }
2479 :
2480 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
2481 : {
2482 1 : if (m_workerThread)
2483 : {
2484 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
2485 : }
2486 : }
2487 :
2488 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
2489 : {
2490 3 : if (!m_context.subtitleSink)
2491 : {
2492 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2493 1 : return false;
2494 : }
2495 :
2496 2 : gchar *identifier = nullptr;
2497 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
2498 :
2499 2 : if (identifier)
2500 : {
2501 1 : textTrackIdentifier = identifier;
2502 1 : m_glibWrapper->gFree(identifier);
2503 1 : return true;
2504 : }
2505 : else
2506 : {
2507 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
2508 1 : return false;
2509 : }
2510 : }
2511 :
2512 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
2513 : {
2514 1 : if (m_workerThread)
2515 : {
2516 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
2517 : }
2518 1 : return true;
2519 : }
2520 :
2521 1 : bool GstGenericPlayer::setSync(bool sync)
2522 : {
2523 1 : if (m_workerThread)
2524 : {
2525 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
2526 : }
2527 1 : return true;
2528 : }
2529 :
2530 4 : bool GstGenericPlayer::getSync(bool &sync)
2531 : {
2532 4 : bool returnValue{false};
2533 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
2534 4 : if (sink)
2535 : {
2536 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
2537 : {
2538 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
2539 1 : returnValue = true;
2540 : }
2541 : else
2542 : {
2543 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
2544 : }
2545 2 : m_gstWrapper->gstObjectUnref(sink);
2546 : }
2547 2 : else if (m_context.pendingSync.has_value())
2548 : {
2549 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2550 1 : sync = m_context.pendingSync.value();
2551 1 : returnValue = true;
2552 : }
2553 : else
2554 : {
2555 : // We dont know the default setting on the sync, so return failure here
2556 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
2557 : }
2558 :
2559 4 : return returnValue;
2560 : }
2561 :
2562 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
2563 : {
2564 1 : if (m_workerThread)
2565 : {
2566 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
2567 : }
2568 1 : return true;
2569 : }
2570 :
2571 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
2572 : {
2573 1 : if (m_workerThread)
2574 : {
2575 2 : m_workerThread->enqueueTask(
2576 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
2577 : }
2578 1 : return true;
2579 : }
2580 :
2581 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
2582 : {
2583 5 : bool returnValue{false};
2584 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2585 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
2586 : {
2587 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
2588 2 : returnValue = true;
2589 : }
2590 : else
2591 : {
2592 3 : std::unique_lock lock{m_context.propertyMutex};
2593 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
2594 : {
2595 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2596 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
2597 1 : returnValue = true;
2598 : }
2599 : else
2600 : {
2601 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
2602 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2603 : }
2604 3 : }
2605 :
2606 5 : if (decoder)
2607 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2608 :
2609 5 : return returnValue;
2610 : }
2611 :
2612 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2613 : {
2614 1 : if (m_workerThread)
2615 : {
2616 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2617 : }
2618 : }
2619 :
2620 2 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2621 : {
2622 2 : if (m_workerThread)
2623 : {
2624 2 : async = isAsync(mediaSourceType);
2625 2 : m_flushWatcher->setFlushing(mediaSourceType, async);
2626 2 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime, async));
2627 : }
2628 : }
2629 :
2630 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2631 : double appliedRate, uint64_t stopPosition)
2632 : {
2633 1 : if (m_workerThread)
2634 : {
2635 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2636 : resetTime, appliedRate, stopPosition));
2637 : }
2638 : }
2639 :
2640 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2641 : {
2642 0 : if (m_workerThread)
2643 : {
2644 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2645 : }
2646 : }
2647 :
2648 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2649 : {
2650 1 : if (m_workerThread)
2651 : {
2652 2 : m_workerThread->enqueueTask(
2653 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2654 : }
2655 1 : }
2656 :
2657 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2658 : {
2659 1 : if (m_workerThread)
2660 : {
2661 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2662 : }
2663 : }
2664 :
2665 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2666 : {
2667 5 : bool returnValue{false};
2668 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2669 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2670 : {
2671 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2672 2 : returnValue = true;
2673 : }
2674 : else
2675 : {
2676 3 : std::unique_lock lock{m_context.propertyMutex};
2677 3 : if (m_context.pendingBufferingLimit.has_value())
2678 : {
2679 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2680 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2681 1 : returnValue = true;
2682 : }
2683 : else
2684 : {
2685 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2686 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2687 : }
2688 3 : }
2689 :
2690 5 : if (decoder)
2691 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2692 :
2693 5 : return returnValue;
2694 : }
2695 :
2696 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2697 : {
2698 1 : if (m_workerThread)
2699 : {
2700 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2701 : }
2702 : }
2703 :
2704 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2705 : {
2706 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2707 : {
2708 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2709 1 : return true;
2710 : }
2711 : else
2712 : {
2713 2 : std::unique_lock lock{m_context.propertyMutex};
2714 2 : if (m_context.pendingUseBuffering.has_value())
2715 : {
2716 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2717 1 : useBuffering = m_context.pendingUseBuffering.value();
2718 1 : return true;
2719 : }
2720 2 : }
2721 1 : return false;
2722 : }
2723 :
2724 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2725 : {
2726 1 : if (m_workerThread)
2727 : {
2728 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2729 : }
2730 : }
2731 :
2732 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2733 : {
2734 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2735 : }
2736 :
2737 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2738 : {
2739 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2740 : }
2741 :
2742 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2743 : {
2744 : // Only add children that are sinks
2745 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2746 : {
2747 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2748 :
2749 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2750 : {
2751 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is been overwritten");
2752 : }
2753 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2754 : }
2755 3 : }
2756 :
2757 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2758 : {
2759 : // Only add children that are sinks
2760 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2761 : {
2762 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2763 :
2764 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2765 : {
2766 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is been overwritten");
2767 : }
2768 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2769 : }
2770 3 : }
2771 :
2772 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2773 : {
2774 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2775 : {
2776 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2777 :
2778 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2779 : {
2780 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2781 1 : return;
2782 : }
2783 :
2784 2 : m_context.autoVideoChildSink = nullptr;
2785 : }
2786 : }
2787 :
2788 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2789 : {
2790 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2791 : {
2792 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2793 :
2794 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2795 : {
2796 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2797 1 : return;
2798 : }
2799 :
2800 2 : m_context.autoAudioChildSink = nullptr;
2801 : }
2802 : }
2803 :
2804 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2805 : {
2806 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2807 14 : if (!kTmpName)
2808 0 : return sink;
2809 :
2810 28 : const std::string kElementTypeName{kTmpName};
2811 14 : if (kElementTypeName == "GstAutoVideoSink")
2812 : {
2813 1 : if (!m_context.autoVideoChildSink)
2814 : {
2815 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2816 : }
2817 : else
2818 : {
2819 1 : return m_context.autoVideoChildSink;
2820 : }
2821 : }
2822 13 : return sink;
2823 14 : }
2824 :
2825 16 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2826 : {
2827 16 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2828 16 : if (!kTmpName)
2829 0 : return sink;
2830 :
2831 32 : const std::string kElementTypeName{kTmpName};
2832 16 : if (kElementTypeName == "GstAutoAudioSink")
2833 : {
2834 1 : if (!m_context.autoAudioChildSink)
2835 : {
2836 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2837 : }
2838 : else
2839 : {
2840 1 : return m_context.autoAudioChildSink;
2841 : }
2842 : }
2843 15 : return sink;
2844 16 : }
2845 :
2846 220 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2847 : {
2848 220 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2849 :
2850 220 : if (enableAudio)
2851 : {
2852 220 : flags |= getGstPlayFlag("audio");
2853 220 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2854 : }
2855 :
2856 220 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2857 : }
2858 :
2859 220 : bool GstGenericPlayer::shouldEnableNativeAudio()
2860 : {
2861 220 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2862 220 : if (factory)
2863 : {
2864 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2865 1 : return true;
2866 : }
2867 219 : return false;
2868 : }
2869 :
2870 : }; // namespace firebolt::rialto::server
|