Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <cstring>
23 : #include <ctime>
24 : #include <stdexcept>
25 :
26 : #include "FlushWatcher.h"
27 : #include "GstDispatcherThread.h"
28 : #include "GstGenericPlayer.h"
29 : #include "GstProtectionMetadata.h"
30 : #include "IGstTextTrackSinkFactory.h"
31 : #include "IMediaPipeline.h"
32 : #include "ITimer.h"
33 : #include "RialtoServerLogging.h"
34 : #include "TypeConverters.h"
35 : #include "Utils.h"
36 : #include "WorkerThread.h"
37 : #include "tasks/generic/GenericPlayerTaskFactory.h"
38 :
namespace
{
/**
 * @brief Report position interval in ms.
 * The position reporting timer should be started whenever the PLAYING state is entered and stopped
 * whenever the session moves to another playback state.
 */
constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
/**
 * @brief Interval between periodic resynchronisations of the subtitle clock.
 */
constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};

/**
 * @brief Equality comparison for SegmentData: two segments are equal when all
 *        four fields (position, resetTime, appliedRate, stopPosition) match.
 */
bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
{
    return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
           (lhs.stopPosition == rhs.stopPosition);
}
} // namespace
55 :
56 : namespace firebolt::rialto::server
57 : {
58 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
59 :
60 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
61 : {
62 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
63 :
64 3 : if (!factory)
65 : {
66 : try
67 : {
68 3 : factory = std::make_shared<GstGenericPlayerFactory>();
69 : }
70 0 : catch (const std::exception &e)
71 : {
72 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
73 : }
74 :
75 3 : GstGenericPlayerFactory::m_factory = factory;
76 : }
77 :
78 3 : return factory;
79 : }
80 :
/**
 * @brief Creates a fully wired GstGenericPlayer.
 *
 * Resolves the gst/glib/rdk-gstreamer-utils wrappers and all collaborator
 * factories, then constructs the player. Any failure is logged and results
 * in a nullptr return rather than a thrown exception.
 *
 * @return the new player, or nullptr when any dependency could not be created.
 */
std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements, bool isLive,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
{
    std::unique_ptr<IGstGenericPlayer> gstPlayer;

    try
    {
        auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
        auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
        std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
        if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
        {
            throw std::runtime_error("Cannot create GstWrapper");
        }
        if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
        {
            throw std::runtime_error("Cannot create GlibWrapper");
        }
        if ((!rdkGstreamerUtilsWrapperFactory) ||
            (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
        {
            throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
        }
        gstPlayer = std::make_unique<
            GstGenericPlayer>(client, decryptionService, type, videoRequirements, isLive, gstWrapper, glibWrapper,
                              rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
                              IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
                              std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
                                                                        rdkGstreamerUtilsWrapper,
                                                                        IGstTextTrackSinkFactory::createFactory()),
                              std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
                              IGstProtectionMetadataHelperFactory::createFactory());
    }
    catch (const std::exception &e)
    {
        // Covers both the runtime_errors above and anything thrown by the
        // GstGenericPlayer constructor itself.
        RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
    }

    return gstPlayer;
}
125 :
/**
 * @brief Constructs the generic player: validates collaborators, starts the
 *        worker thread, builds the MSE playbin pipeline and starts the gst
 *        bus dispatcher thread.
 *
 * @throws std::runtime_error when a mandatory collaborator is missing or the
 *         pipeline cannot be configured; resources created before the failure
 *         (worker thread, pipeline) are torn down first.
 */
GstGenericPlayer::GstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements, bool isLive,
    const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
    const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
    const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
    std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
    std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
    std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
    : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
      m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
      m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");

    // Block until global GStreamer initialisation has finished.
    gstInitialiser.waitForInitialisation();

    m_context.isLive = isLive;
    m_context.decryptionService = &decryptionService;

    if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
    {
        throw std::runtime_error("Cannot create GstSrc");
    }

    if (!timerFactory)
    {
        throw std::runtime_error("TimeFactory is invalid");
    }

    if ((!gstProtectionMetadataFactory) ||
        (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
    {
        throw std::runtime_error("Cannot create protection metadata wrapper");
    }

    // Ensure that rialtosrc has been initalised
    m_context.gstSrc->initSrc();

    // Start task thread
    if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
    {
        throw std::runtime_error("Failed to create the worker thread");
    }

    // Initialise pipeline
    switch (type)
    {
    case MediaType::MSE:
    {
        initMsePipeline();
        break;
    }
    default:
    {
        // Worker thread is already running: join it before throwing so the
        // half-constructed object does not leak a live thread.
        resetWorkerThread();
        throw std::runtime_error("Media type not supported");
    }
    }

    // Check the video requirements for a limited video.
    // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
    // video in a dual video scenario.
    if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
    {
        RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
        bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
        bool ermContextResult = setErmContext();
        // Secondary video requires at least one of the two mechanisms to work.
        if (!westerossinkSecondaryVideoResult && !ermContextResult)
        {
            // Tear down everything built so far before aborting construction.
            resetWorkerThread();
            termPipeline();
            throw std::runtime_error("Could not set secondary video");
        }
    }
    else
    {
        RIALTO_SERVER_LOG_MIL("Primary video playback selected");
    }

    m_gstDispatcherThread = gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline,
                                                                                 m_context.flushOnPrerollController,
                                                                                 m_gstWrapper);
}
212 :
/**
 * @brief Destroys the player: stops bus dispatching, joins the worker thread,
 *        then tears down the pipeline. Order matters — no thread may touch
 *        the pipeline once termPipeline() starts unreffing objects.
 */
GstGenericPlayer::~GstGenericPlayer()
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
    // Stop delivering gst bus callbacks first.
    m_gstDispatcherThread.reset();

    // Drain and join the worker thread so no queued task runs afterwards.
    resetWorkerThread();

    termPipeline();
}
222 :
/**
 * @brief Builds the MSE pipeline around a playbin element.
 *
 * Creates the playbin, configures flags and signal callbacks, points it at
 * the rialto:// uri and moves it to READY. Failures to find playsink or to
 * reach READY are logged but not fatal.
 */
void GstGenericPlayer::initMsePipeline()
{
    // Make playbin
    m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
    // Set pipeline flags
    setPlaybinFlags(true);

    // Set callbacks
    m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
                                  G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);

    // Set uri
    m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);

    // Check playsink
    GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
    if (playsink)
    {
        m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
        m_gstWrapper->gstObjectUnref(playsink);
    }
    else
    {
        GST_WARNING("No playsink ?!?!?");
    }
    if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
    {
        GST_WARNING("Failed to set pipeline to READY state");
    }
    RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
}
256 :
/**
 * @brief Stops the worker thread: queues a shutdown task, joins, and drops
 *        the thread object. Callers must ensure m_workerThread is non-null.
 */
void GstGenericPlayer::resetWorkerThread()
{
    // Shutdown task thread
    m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
    m_workerThread->join();
    m_workerThread.reset();
}
264 :
/**
 * @brief Stops and destroys the pipeline and every cached GStreamer reference.
 *
 * Must only run after the worker and dispatcher threads have stopped, since
 * it unrefs objects those threads could otherwise still touch.
 */
void GstGenericPlayer::termPipeline()
{
    if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
    {
        m_finishSourceSetupTimer->cancel();
    }

    m_finishSourceSetupTimer.reset();

    // Release any media buffers still queued per stream.
    for (auto &elem : m_context.streamInfo)
    {
        StreamInfo &streamInfo = elem.second;
        for (auto &buffer : streamInfo.buffers)
        {
            m_gstWrapper->gstBufferUnref(buffer);
        }

        streamInfo.buffers.clear();
    }

    // Execute the stop task synchronously on the calling thread, then detach
    // the bus sync handler before releasing the bus.
    m_taskFactory->createStop(m_context, *this)->execute();
    GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
    m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
    m_gstWrapper->gstObjectUnref(bus);

    if (m_context.source)
    {
        // NOTE(review): unlike the sinks below, m_context.source is not reset
        // to nullptr after the unref — confirm nothing reads it afterwards.
        m_gstWrapper->gstObjectUnref(m_context.source);
    }
    if (m_context.subtitleSink)
    {
        m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
        m_context.subtitleSink = nullptr;
    }

    if (m_context.videoSink)
    {
        m_gstWrapper->gstObjectUnref(m_context.videoSink);
        m_context.videoSink = nullptr;
    }
    if (m_context.playbackGroup.m_curAudioPlaysinkBin)
    {
        m_gstWrapper->gstObjectUnref(m_context.playbackGroup.m_curAudioPlaysinkBin);
        m_context.playbackGroup.m_curAudioPlaysinkBin = nullptr;
    }

    // Delete the pipeline
    m_gstWrapper->gstObjectUnref(m_context.pipeline);

    RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
}
316 :
/**
 * @brief Looks up a GstPlayFlags value (e.g. "audio", "video", "text") by nick.
 *
 * @param nick flag nickname as registered in the GstPlayFlags GType.
 * @return the flag's value, or 0 when the nick is unknown.
 */
unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
{
    // NOTE(review): the class reference taken here is never released with a
    // matching gTypeClassUnref — presumably kept alive deliberately for the
    // process lifetime; confirm against the wrapper's ownership contract.
    GFlagsClass *flagsClass =
        static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
    GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
    return flag ? flag->value : 0;
}
324 :
325 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
326 : {
327 1 : self->m_gstWrapper->gstObjectRef(source);
328 1 : if (self->m_workerThread)
329 : {
330 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
331 : }
332 : }
333 :
334 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
335 : {
336 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
337 1 : self->m_gstWrapper->gstObjectRef(element);
338 1 : if (self->m_workerThread)
339 : {
340 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
341 : }
342 : }
343 :
344 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
345 : {
346 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
347 1 : if (self->m_workerThread)
348 : {
349 2 : self->m_workerThread->enqueueTask(
350 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
351 : }
352 1 : }
353 :
354 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
355 : {
356 1 : if (m_workerThread)
357 : {
358 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
359 : }
360 : }
361 :
362 2 : void GstGenericPlayer::allSourcesAttached()
363 : {
364 2 : if (m_workerThread)
365 : {
366 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
367 : }
368 : }
369 :
370 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
371 : {
372 1 : if (m_workerThread)
373 : {
374 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
375 : }
376 : }
377 :
378 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
379 : {
380 1 : if (m_workerThread)
381 : {
382 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
383 : }
384 : }
385 :
386 1 : void GstGenericPlayer::setPosition(std::int64_t position)
387 : {
388 1 : if (m_workerThread)
389 : {
390 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
391 : }
392 : }
393 :
394 1 : void GstGenericPlayer::setPlaybackRate(double rate)
395 : {
396 1 : if (m_workerThread)
397 : {
398 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
399 : }
400 : }
401 :
402 11 : bool GstGenericPlayer::getPosition(std::int64_t &position)
403 : {
404 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
405 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
406 11 : position = getPosition(m_context.pipeline);
407 11 : if (position == -1)
408 : {
409 3 : return false;
410 : }
411 :
412 8 : return true;
413 : }
414 :
415 2 : bool GstGenericPlayer::getDuration(std::int64_t &duration)
416 : {
417 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
418 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
419 2 : if (!m_context.pipeline || !m_gstWrapper->gstElementQueryDuration(m_context.pipeline, GST_FORMAT_TIME, &duration))
420 : {
421 1 : RIALTO_SERVER_LOG_WARN("Failed to query duration");
422 1 : return false;
423 : }
424 1 : return true;
425 : }
426 :
/**
 * @brief Returns the sink element for the given media type, or nullptr.
 *
 * Reads the playbin "audio-sink"/"video-sink"/"text-sink" property. For
 * audio/video, when the sink is an auto-sink its actual child sink is
 * returned instead. The returned element carries a reference that the
 * caller must release.
 */
GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
{
    const char *kSinkName{nullptr};
    GstElement *sink{nullptr};
    switch (mediaSourceType)
    {
    case MediaSourceType::AUDIO:
        kSinkName = "audio-sink";
        break;
    case MediaSourceType::VIDEO:
        kSinkName = "video-sink";
        break;
    case MediaSourceType::SUBTITLE:
        kSinkName = "text-sink";
        break;
    default:
        break;
    }
    if (!kSinkName)
    {
        RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
    }
    else
    {
        if (m_context.pipeline == nullptr)
        {
            RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
        }
        else
        {
            RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
        }
        m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
        if (sink && firebolt::rialto::MediaSourceType::SUBTITLE != mediaSourceType)
        {
            GstElement *autoSink{sink};
            if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
                autoSink = getSinkChildIfAutoVideoSink(sink);
            else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
                autoSink = getSinkChildIfAutoAudioSink(sink);

            // Is this an auto-sink?...
            if (autoSink != sink)
            {
                // Swap references: drop the auto-sink wrapper, keep its child.
                m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));

                // increase the reference count of the auto sink
                sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
            }
        }
    }
    return sink;
}
480 :
/**
 * @brief Records in the flush watcher that the given source finished flushing.
 */
void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
{
    m_flushWatcher->setFlushed(mediaSourceType);
}
485 :
/**
 * @brief Snapshots current position and volume and pushes them to the client.
 *
 * Failures of getPosition/getVolume are deliberately ignored — the client
 * receives whatever values were (or were not) filled in.
 */
void GstGenericPlayer::notifyPlaybackInfo()
{
    PlaybackInfo info;
    getPosition(info.currentPosition);
    getVolume(info.volume);
    m_gstPlayerClient->notifyPlaybackInfo(info);
}
493 :
/**
 * @brief Finds the first decoder element in the pipeline for the given media type.
 *
 * Recursively iterates the pipeline bin and returns the first element whose
 * factory is a decoder of the requested media class (audio/video). The
 * returned element is ref'd — the caller must unref it. Returns nullptr when
 * no matching decoder exists.
 * NOTE: mirrors getParser() below; keep the two in sync.
 */
GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found: release iterator resources and hand back a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed under us; restart the iteration.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find decoder");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
549 :
/**
 * @brief Finds the first parser element in the pipeline for the given media type.
 *
 * Recursively iterates the pipeline bin and returns the first element whose
 * factory is a parser of the requested media class (audio/video). The
 * returned element is ref'd — the caller must unref it. Returns nullptr when
 * no matching parser exists.
 * NOTE: mirrors getDecoder() above; keep the two in sync.
 */
GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found: release iterator resources and hand back a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed under us; restart the iteration.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find parser");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
605 :
606 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
607 7 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
608 : {
609 7 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
610 7 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
611 7 : if (kSource)
612 : {
613 6 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
614 : audioAttributes =
615 18 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
616 6 : audioConfig.numberOfChannels, audioConfig.sampleRate,
617 : 0, // used only in one of logs in rdk_gstreamer_utils, no
618 : // need to set this param.
619 : 0, // used only in one of logs in rdk_gstreamer_utils, no
620 : // need to set this param.
621 6 : audioConfig.codecSpecificConfig.data(),
622 : static_cast<std::uint32_t>(
623 6 : audioConfig.codecSpecificConfig.size())};
624 6 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
625 : {
626 4 : audioAttributes->m_codecParam = "mp4a";
627 : }
628 2 : else if (source->getMimeType() == "audio/x-eac3")
629 : {
630 1 : audioAttributes->m_codecParam = "ec-3";
631 : }
632 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
633 : {
634 1 : audioAttributes->m_codecParam = "lpcm";
635 : }
636 6 : }
637 : else
638 : {
639 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
640 : }
641 :
642 7 : return audioAttributes;
643 : }
644 :
645 2 : void GstGenericPlayer::configAudioCap(firebolt::rialto::wrappers::AudioAttributesPrivate *pAttrib, bool *audioaac,
646 : bool svpenabled, GstCaps **appsrcCaps)
647 : {
648 : // this function comes from rdk_gstreamer_utils
649 2 : if (!pAttrib || !audioaac || !appsrcCaps)
650 : {
651 0 : RIALTO_SERVER_LOG_ERROR("configAudioCap: invalid null parameter");
652 0 : return;
653 : }
654 : gchar *capsString;
655 2 : RIALTO_SERVER_LOG_DEBUG("Config audio codec %s sampling rate %d channel %d alignment %d",
656 : pAttrib->m_codecParam.c_str(), pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
657 : pAttrib->m_blockAlignment);
658 6 : if (pAttrib->m_codecParam.compare(0, 4, std::string("mp4a")) == 0)
659 : {
660 2 : RIALTO_SERVER_LOG_DEBUG("Using AAC");
661 2 : capsString = m_glibWrapper->gStrdupPrintf("audio/mpeg, mpegversion=4, enable-svp=(string)%s",
662 : svpenabled ? "true" : "false");
663 2 : *audioaac = true;
664 : }
665 : else
666 : {
667 0 : RIALTO_SERVER_LOG_DEBUG("Using EAC3");
668 0 : capsString = m_glibWrapper->gStrdupPrintf("audio/x-eac3, framed=(boolean)true, rate=(int)%u, channels=(int)%u, "
669 : "alignment=(string)frame, enable-svp=(string)%s",
670 : pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
671 : svpenabled ? "true" : "false");
672 0 : *audioaac = false;
673 : }
674 2 : *appsrcCaps = m_gstWrapper->gstCapsFromString(capsString);
675 2 : m_glibWrapper->gFree(capsString);
676 : }
677 :
678 1 : void GstGenericPlayer::haltAudioPlayback()
679 : {
680 : // this function comes from rdk_gstreamer_utils
681 1 : if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
682 : {
683 0 : RIALTO_SERVER_LOG_ERROR("haltAudioPlayback: audio playsink bin or decode bin is null");
684 0 : return;
685 : }
686 1 : GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
687 :
688 : // Transition Playsink to Ready
689 1 : if (GST_STATE_CHANGE_FAILURE ==
690 1 : m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioPlaysinkBin, GST_STATE_READY))
691 : {
692 0 : RIALTO_SERVER_LOG_WARN("Failed to set AudioPlaysinkBin to READY");
693 0 : return;
694 : }
695 1 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, ¤tState, &pending,
696 : GST_CLOCK_TIME_NONE);
697 1 : if (currentState == GST_STATE_PAUSED)
698 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioPlaySinkBin State = %d", currentState);
699 : // Transition Decodebin to Paused
700 1 : if (GST_STATE_CHANGE_FAILURE ==
701 1 : m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecodeBin, GST_STATE_PAUSED))
702 : {
703 0 : RIALTO_SERVER_LOG_WARN("Failed to set AudioDecodeBin to PAUSED");
704 0 : return;
705 : }
706 1 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, ¤tState, &pending,
707 : GST_CLOCK_TIME_NONE);
708 1 : if (currentState == GST_STATE_PAUSED)
709 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current DecodeBin State = %d", currentState);
710 : }
711 :
712 1 : void GstGenericPlayer::resumeAudioPlayback()
713 : {
714 : // this function comes from rdk_gstreamer_utils
715 1 : if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
716 : {
717 0 : RIALTO_SERVER_LOG_ERROR("resumeAudioPlayback: audio playsink bin or decode bin is null");
718 0 : return;
719 : }
720 1 : GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
721 1 : m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioPlaysinkBin);
722 1 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, ¤tState, &pending,
723 : GST_CLOCK_TIME_NONE);
724 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> AudioPlaysinkbin State = %d Pending = %d", currentState, pending);
725 1 : m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioDecodeBin);
726 1 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, ¤tState, &pending,
727 : GST_CLOCK_TIME_NONE);
728 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Decodebin State = %d Pending = %d", currentState, pending);
729 : }
730 :
731 1 : void GstGenericPlayer::firstTimeSwitchFromAC3toAAC(GstCaps *newAudioCaps)
732 : {
733 : // this function comes from rdk_gstreamer_utils
734 1 : if (!m_context.playbackGroup.m_curAudioTypefind || !m_context.playbackGroup.m_curAudioDecodeBin)
735 : {
736 0 : RIALTO_SERVER_LOG_ERROR("firstTimeSwitchFromAC3toAAC: audio typefind or decode bin is null");
737 0 : return;
738 : }
739 1 : GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
740 1 : GstPad *pTypfdSrcPad = NULL;
741 1 : GstPad *pTypfdSrcPeerPad = NULL;
742 1 : GstPad *pNewAudioDecoderSrcPad = NULL;
743 1 : GstElement *newAudioParse = NULL;
744 1 : GstElement *newAudioDecoder = NULL;
745 1 : GstElement *newQueue = NULL;
746 1 : gboolean linkRet = false;
747 :
748 : /* Get the SinkPad of ASink - pTypfdSrcPeerPad */
749 1 : if ((pTypfdSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioTypefind, "src")) !=
750 : NULL) // Unref the Pad
751 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind SrcPad = %p", pTypfdSrcPad);
752 1 : if ((pTypfdSrcPeerPad = m_gstWrapper->gstPadGetPeer(pTypfdSrcPad)) != NULL) // Unref the Pad
753 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind Src Downstream Element Pad = %p", pTypfdSrcPeerPad);
754 : // AudioDecoder Downstream Unlink
755 1 : if (m_gstWrapper->gstPadUnlink(pTypfdSrcPad, pTypfdSrcPeerPad) == FALSE)
756 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Downstream Unlink Failed");
757 1 : newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
758 1 : newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
759 1 : newQueue = m_gstWrapper->gstElementFactoryMake("queue", "aqueue");
760 : // Add new Decoder to Decodebin
761 1 : if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
762 : {
763 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
764 : }
765 : // Add new Parser to Decodebin
766 1 : if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
767 : {
768 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
769 : }
770 : // Add new Queue to Decodebin
771 1 : if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newQueue) == TRUE)
772 : {
773 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Added New queue = %p", newQueue);
774 : }
775 1 : if ((pNewAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) != NULL) // Unref the Pad
776 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", pNewAudioDecoderSrcPad);
777 : // Connect decoder to ASINK
778 1 : if (m_gstWrapper->gstPadLink(pNewAudioDecoderSrcPad, pTypfdSrcPeerPad) != GST_PAD_LINK_OK)
779 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
780 2 : linkRet = m_gstWrapper->gstElementLink(newAudioParse, newQueue) &&
781 1 : m_gstWrapper->gstElementLink(newQueue, newAudioDecoder);
782 1 : if (!linkRet)
783 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Downstream Link Failed for typefind, parser, decoder");
784 : /* Force Caps */
785 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
786 1 : if (GST_STATE_CHANGE_FAILURE ==
787 1 : m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioTypefind, GST_STATE_READY))
788 : {
789 0 : RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY");
790 0 : m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
791 0 : m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
792 0 : m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
793 0 : return;
794 : }
795 1 : m_glibWrapper->gObjectSet(G_OBJECT(m_context.playbackGroup.m_curAudioTypefind), "force-caps", newAudioCaps, NULL);
796 1 : m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioTypefind);
797 1 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioTypefind, ¤tState, &pending,
798 : GST_CLOCK_TIME_NONE);
799 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
800 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
801 1 : m_context.playbackGroup.m_linkTypefindParser = true;
802 : /* Update the state */
803 1 : m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
804 1 : m_gstWrapper->gstElementGetState(newAudioDecoder, ¤tState, &pending, GST_CLOCK_TIME_NONE);
805 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
806 1 : m_gstWrapper->gstElementSyncStateWithParent(newQueue);
807 1 : m_gstWrapper->gstElementGetState(newQueue, ¤tState, &pending, GST_CLOCK_TIME_NONE);
808 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> New queue State = %d Pending = %d", currentState, pending);
809 1 : m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
810 1 : m_gstWrapper->gstElementGetState(newAudioParse, ¤tState, &pending, GST_CLOCK_TIME_NONE);
811 1 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
812 1 : m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
813 1 : m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
814 1 : m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
815 1 : return;
816 : }
817 :
/**
 * @brief Performs an on-the-fly (OTF) audio codec switch between AAC and AC3 inside the decodebin.
 *
 * Ported from rdk_gstreamer_utils. The procedure is:
 *  1. No-op if the current codec already matches the requested one.
 *  2. If no decoder exists yet and we are moving AC3 -> AAC, delegate to
 *     firstTimeSwitchFromAC3toAAC() and record the new codec.
 *  3. Otherwise unlink the current parser/decoder from their up/downstream peers,
 *     drive them to GST_STATE_NULL, remove them from the decodebin, create the
 *     inverse parser/decoder pair, re-link it in place, force the new caps on the
 *     typefind element and re-sync all element states with the parent bin.
 *
 * @param isAudioAAC   true if the incoming stream is AAC, false for AC3.
 * @param newAudioCaps caps of the incoming stream, applied as "force-caps" on typefind.
 *
 * @retval true  the codec was switched (or the first-time AC3->AAC path ran).
 * @retval false no switch needed, or a required element (decoder/parser/decodebin) was null.
 */
bool GstGenericPlayer::switchAudioCodec(bool isAudioAAC, GstCaps *newAudioCaps)
{ // this function comes from rdk_gstreamer_utils
    bool ret = false;
    RIALTO_SERVER_LOG_DEBUG("Current Audio Codec AAC = %d Same as Incoming audio Codec AAC = %d",
                            m_context.playbackGroup.m_isAudioAAC, isAudioAAC);
    // Nothing to do when the pipeline already decodes the requested codec.
    if (m_context.playbackGroup.m_isAudioAAC == isAudioAAC)
    {
        return ret;
    }
    // Special case: first AC3 -> AAC switch before any decoder has been created.
    if ((m_context.playbackGroup.m_curAudioDecoder == NULL) && (!(m_context.playbackGroup.m_isAudioAAC)) && (isAudioAAC))
    {
        firstTimeSwitchFromAC3toAAC(newAudioCaps);
        m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
        return true;
    }
    // From here on all three elements are required for the re-link procedure.
    if (!m_context.playbackGroup.m_curAudioDecoder || !m_context.playbackGroup.m_curAudioParse ||
        !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("switchAudioCodec: audio decoder, parser or decode bin is null");
        return false;
    }
    GstElement *newAudioParse = NULL;
    GstElement *newAudioDecoder = NULL;
    GstPad *newAudioParseSrcPad = NULL;
    GstPad *newAudioParseSinkPad = NULL;
    GstPad *newAudioDecoderSrcPad = NULL;
    GstPad *newAudioDecoderSinkPad = NULL;
    GstPad *audioDecSrcPad = NULL;
    GstPad *audioDecSinkPad = NULL;
    GstPad *audioDecSrcPeerPad = NULL;
    GstPad *audioDecSinkPeerPad = NULL;
    GstPad *audioParseSrcPad = NULL;
    GstPad *audioParseSinkPad = NULL;
    GstPad *audioParseSrcPeerPad = NULL;
    GstPad *audioParseSinkPeerPad = NULL;
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};

    // Get AudioDecoder Src Pads
    if ((audioDecSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Pad = %p", audioDecSrcPad);
    // Get AudioDecoder Sink Pads
    if ((audioDecSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "sink")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Pad = %p", audioDecSinkPad);
    // Get AudioDecoder Src Peer i.e. Downstream Element Pad
    if ((audioDecSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSrcPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Downstream Element Pad = %p", audioDecSrcPeerPad);
    // Get AudioDecoder Sink Peer i.e. Upstream Element Pad
    if ((audioDecSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSinkPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Upstream Element Pad = %p", audioDecSinkPeerPad);
    // Get AudioParser Src Pads
    if ((audioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Pad = %p", audioParseSrcPad);
    // Get AudioParser Sink Pads
    if ((audioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "sink")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Pad = %p", audioParseSinkPad);
    // Get AudioParser Src Peer i.e. Downstream Element Pad
    if ((audioParseSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSrcPad)) != NULL) // Unref the Peer Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Downstream Element Pad = %p", audioParseSrcPeerPad);
    // Get AudioParser Sink Peer i.e. Upstream Element Pad
    if ((audioParseSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSinkPad)) != NULL) // Unref the Peer Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Upstream Element Pad = %p", audioParseSinkPeerPad);
    // AudioDecoder Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioDecSrcPad, audioDecSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Downstream Unlink Failed");
    // AudioDecoder Upstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioDecSinkPeerPad, audioDecSinkPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Upstream Unlink Failed");
    // AudioParser Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioParseSrcPad, audioParseSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Downstream Unlink Failed");
    // AudioParser Upstream Unlink
    if (m_gstWrapper->gstPadUnlink(audioParseSinkPeerPad, audioParseSinkPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Upstream Unlink Failed");
    // Current Audio Decoder NULL
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecoder, GST_STATE_NULL))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioDecoder to NULL");
    }
    // Blocking wait for the state change to complete before removing the element.
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecoder, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_NULL)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder State = %d", currentState);
    // Current Audio Parser NULL
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioParse, GST_STATE_NULL))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioParser to NULL");
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioParse, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_NULL)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser State = %d", currentState);
    // Remove Audio Decoder From Decodebin
    // NOTE(review): gst_bin_remove drops the bin's reference, so the pointers are
    // cleared here rather than unreffed explicitly.
    if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
                                   m_context.playbackGroup.m_curAudioDecoder) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioDecoder = %p", m_context.playbackGroup.m_curAudioDecoder);
        m_context.playbackGroup.m_curAudioDecoder = NULL;
    }
    // Remove Audio Parser From Decodebin
    if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
                                   m_context.playbackGroup.m_curAudioParse) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioParser = %p", m_context.playbackGroup.m_curAudioParse);
        m_context.playbackGroup.m_curAudioParse = NULL;
    }
    // Create new Audio Decoder and Parser. The inverse of the current
    if (m_context.playbackGroup.m_isAudioAAC)
    {
        // Switching AAC -> AC3: AC3 is passed through, so a pass-through
        // "identity" element stands in for the decoder.
        newAudioParse = m_gstWrapper->gstElementFactoryMake("ac3parse", "ac3parse");
        newAudioDecoder = m_gstWrapper->gstElementFactoryMake("identity", "fake_aud_ac3dec");
    }
    else
    {
        // Switching AC3 -> AAC: software AAC decode via avdec_aac.
        newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
        newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
    }
    {
        GstPadLinkReturn gstPadLinkRet = GST_PAD_LINK_OK;
        GstElement *audioParseUpstreamEl = NULL;
        // Add new Decoder to Decodebin
        if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
        }
        // Add new Parser to Decodebin
        if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
        }
        if ((newAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) !=
            NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", newAudioDecoderSrcPad);
        if ((newAudioDecoderSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "sink")) !=
            NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Sink Pad = %p", newAudioDecoderSinkPad);
        // Link New Decoder to Downstream followed by UpStream
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioDecoderSrcPad, audioDecSrcPeerPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(audioDecSinkPeerPad, newAudioDecoderSinkPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Upstream Link Failed");
        if ((newAudioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "src")) != NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Src Pad = %p", newAudioParseSrcPad);
        if ((newAudioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "sink")) != NULL) // Unref the Pad
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Sink Pad = %p", newAudioParseSinkPad);
        // Link New Parser to Downstream followed by UpStream
        if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioParseSrcPad, audioParseSrcPeerPad)) != GST_PAD_LINK_OK)
            RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Downstream Link Failed %d", gstPadLinkRet);
        // If the parser's upstream element is the typefind, force the new caps on
        // it and defer the typefind->parser link (m_linkTypefindParser flag).
        if ((audioParseUpstreamEl = GST_ELEMENT_CAST(m_gstWrapper->gstPadGetParent(audioParseSinkPeerPad))) ==
            m_context.playbackGroup.m_curAudioTypefind)
        {
            RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
            if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(audioParseUpstreamEl, GST_STATE_READY))
            {
                RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY in switchAudioCodec");
            }
            m_glibWrapper->gObjectSet(G_OBJECT(audioParseUpstreamEl), "force-caps", newAudioCaps, NULL);
            m_gstWrapper->gstElementSyncStateWithParent(audioParseUpstreamEl);
            m_gstWrapper->gstElementGetState(audioParseUpstreamEl, &currentState, &pending, GST_CLOCK_TIME_NONE);
            RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
            RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
            m_context.playbackGroup.m_linkTypefindParser = true;
            m_gstWrapper->gstObjectUnref(audioParseUpstreamEl);
        }
        m_gstWrapper->gstObjectUnref(newAudioDecoderSrcPad);
        m_gstWrapper->gstObjectUnref(newAudioDecoderSinkPad);
        m_gstWrapper->gstObjectUnref(newAudioParseSrcPad);
        m_gstWrapper->gstObjectUnref(newAudioParseSinkPad);
    }
    // Release the pad references obtained above ("Unref the Pad" markers).
    m_gstWrapper->gstObjectUnref(audioParseSinkPeerPad);
    m_gstWrapper->gstObjectUnref(audioParseSrcPeerPad);
    m_gstWrapper->gstObjectUnref(audioParseSinkPad);
    m_gstWrapper->gstObjectUnref(audioParseSrcPad);
    m_gstWrapper->gstObjectUnref(audioDecSinkPeerPad);
    m_gstWrapper->gstObjectUnref(audioDecSrcPeerPad);
    m_gstWrapper->gstObjectUnref(audioDecSinkPad);
    m_gstWrapper->gstObjectUnref(audioDecSrcPad);
    // Bring the new elements to the parent bin's state and log the result.
    m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
    m_gstWrapper->gstElementGetState(newAudioDecoder, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
    m_gstWrapper->gstElementGetState(newAudioParse, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
    m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
    return true;
}
1009 :
1010 2 : bool GstGenericPlayer::performAudioTrackCodecChannelSwitch(const void *pSampleAttr,
1011 : firebolt::rialto::wrappers::AudioAttributesPrivate *pAudioAttr,
1012 : uint32_t *pStatus, unsigned int *pui32Delay,
1013 : long long *pAudioChangeTargetPts, // NOLINT(runtime/int)
1014 : const long long *pcurrentDispPts, // NOLINT(runtime/int)
1015 : unsigned int *audioChangeStage, GstCaps **appsrcCaps,
1016 : bool *audioaac, bool svpenabled, GstElement *aSrc, bool *ret)
1017 : {
1018 : // this function comes from rdk_gstreamer_utils
1019 2 : if (!pStatus || !pui32Delay || !pAudioChangeTargetPts || !pcurrentDispPts || !audioChangeStage || !appsrcCaps ||
1020 2 : !audioaac || !aSrc || !ret)
1021 : {
1022 0 : RIALTO_SERVER_LOG_ERROR("performAudioTrackCodecChannelSwitch: invalid null parameter");
1023 0 : return false;
1024 : }
1025 :
1026 2 : constexpr uint32_t kOk = 0;
1027 2 : constexpr uint32_t kWaitWhileIdling = 100;
1028 2 : constexpr int kAudioChangeGapThresholdMS = 40;
1029 2 : constexpr unsigned int kAudchgAlign = 3;
1030 :
1031 : struct timespec ts, now;
1032 : unsigned int reconfigDelayMs;
1033 2 : clock_gettime(CLOCK_MONOTONIC, &ts);
1034 2 : if (*pStatus != kOk || pSampleAttr == nullptr)
1035 : {
1036 0 : RIALTO_SERVER_LOG_DEBUG("No audio data ready yet");
1037 0 : *pui32Delay = kWaitWhileIdling;
1038 0 : *ret = false;
1039 0 : return true;
1040 : }
1041 2 : RIALTO_SERVER_LOG_DEBUG("Received first audio packet after a flush, PTS");
1042 2 : if (pAudioAttr)
1043 : {
1044 2 : const char *pCodecStr = pAudioAttr->m_codecParam.c_str();
1045 2 : const char *pCodecAcc = strstr(pCodecStr, "mp4a");
1046 2 : bool isAudioAAC = (pCodecAcc) ? true : false;
1047 2 : bool isCodecSwitch = false;
1048 2 : RIALTO_SERVER_LOG_DEBUG("Audio Attribute format %s channel %d samp %d, bitrate %d blockAlignment %d", pCodecStr,
1049 : pAudioAttr->m_numberOfChannels, pAudioAttr->m_samplesPerSecond, pAudioAttr->m_bitrate,
1050 : pAudioAttr->m_blockAlignment);
1051 2 : *pAudioChangeTargetPts = *pcurrentDispPts;
1052 2 : *audioChangeStage = kAudchgAlign;
1053 2 : if (*appsrcCaps)
1054 : {
1055 2 : m_gstWrapper->gstCapsUnref(*appsrcCaps);
1056 2 : *appsrcCaps = NULL;
1057 : }
1058 2 : if (isAudioAAC != *audioaac)
1059 1 : isCodecSwitch = true;
1060 2 : configAudioCap(pAudioAttr, audioaac, svpenabled, appsrcCaps);
1061 : {
1062 2 : gboolean sendRet = FALSE;
1063 2 : GstEvent *flushStart = NULL;
1064 2 : GstEvent *flushStop = NULL;
1065 2 : flushStart = m_gstWrapper->gstEventNewFlushStart();
1066 2 : sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStart);
1067 2 : if (!sendRet)
1068 0 : RIALTO_SERVER_LOG_DEBUG("failed to send flush-start event");
1069 2 : flushStop = m_gstWrapper->gstEventNewFlushStop(TRUE);
1070 2 : sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStop);
1071 2 : if (!sendRet)
1072 0 : RIALTO_SERVER_LOG_DEBUG("failed to send flush-stop event");
1073 : }
1074 2 : if (!isCodecSwitch)
1075 : {
1076 1 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
1077 : }
1078 : else
1079 : {
1080 1 : RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH mAudioAAC = %d", *audioaac);
1081 1 : haltAudioPlayback();
1082 1 : if (switchAudioCodec(*audioaac, *appsrcCaps) == false)
1083 : {
1084 0 : RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH FAILED switchAudioCodec mAudioAAC = %d", *audioaac);
1085 : }
1086 1 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
1087 1 : resumeAudioPlayback();
1088 : }
1089 2 : clock_gettime(CLOCK_MONOTONIC, &now);
1090 2 : reconfigDelayMs = now.tv_nsec > ts.tv_nsec ? (now.tv_nsec - ts.tv_nsec) / 1000000
1091 0 : : (1000 - (ts.tv_nsec - now.tv_nsec) / 1000000);
1092 2 : (*pAudioChangeTargetPts) += (reconfigDelayMs + kAudioChangeGapThresholdMS);
1093 : }
1094 : else
1095 : {
1096 0 : RIALTO_SERVER_LOG_DEBUG("first audio after change no attribute drop!");
1097 0 : *pui32Delay = 0;
1098 0 : *ret = false;
1099 0 : return true;
1100 : }
1101 2 : *ret = true;
1102 2 : return true;
1103 : }
1104 :
1105 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
1106 : {
1107 1 : if (!m_workerThread)
1108 0 : return false;
1109 :
1110 2 : m_workerThread->enqueueTask(
1111 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
1112 1 : return true;
1113 : }
1114 :
1115 1 : bool GstGenericPlayer::setReportDecodeErrors(const MediaSourceType &mediaSourceType, bool reportDecodeErrors)
1116 : {
1117 1 : if (!m_workerThread)
1118 0 : return false;
1119 :
1120 2 : m_workerThread->enqueueTask(
1121 2 : m_taskFactory->createSetReportDecodeErrors(m_context, *this, mediaSourceType, reportDecodeErrors));
1122 1 : return true;
1123 : }
1124 :
1125 2 : bool GstGenericPlayer::getQueuedFrames(uint32_t &queuedFrames)
1126 : {
1127 2 : bool returnValue{false};
1128 2 : GstElement *decoder{getDecoder(MediaSourceType::VIDEO)};
1129 2 : if (decoder)
1130 : {
1131 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "queued-frames"))
1132 : {
1133 1 : m_glibWrapper->gObjectGet(decoder, "queued-frames", &queuedFrames, nullptr);
1134 1 : returnValue = true;
1135 : }
1136 : else
1137 : {
1138 1 : RIALTO_SERVER_LOG_ERROR("queued-frames not supported in element %s", GST_ELEMENT_NAME(decoder));
1139 : }
1140 2 : m_gstWrapper->gstObjectUnref(decoder);
1141 : }
1142 : else
1143 : {
1144 0 : RIALTO_SERVER_LOG_ERROR("Failed to get queued-frames property, decoder is NULL");
1145 : }
1146 :
1147 2 : return returnValue;
1148 : }
1149 :
1150 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
1151 : {
1152 5 : bool returnValue{false};
1153 5 : GstElement *sink{getSink(mediaSourceType)};
1154 5 : if (sink)
1155 : {
1156 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1157 : {
1158 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
1159 2 : returnValue = true;
1160 : }
1161 : else
1162 : {
1163 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
1164 : }
1165 3 : m_gstWrapper->gstObjectUnref(sink);
1166 : }
1167 : else
1168 : {
1169 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
1170 : }
1171 :
1172 5 : return returnValue;
1173 : }
1174 :
1175 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
1176 : {
1177 5 : bool returnValue{false};
1178 5 : GstElement *sink{getSink(mediaSourceType)};
1179 5 : if (sink)
1180 : {
1181 3 : GstStructure *stats{nullptr};
1182 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
1183 3 : if (!stats)
1184 : {
1185 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
1186 : }
1187 : else
1188 : {
1189 : guint64 renderedFramesTmp;
1190 : guint64 droppedFramesTmp;
1191 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
1192 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
1193 : {
1194 1 : renderedFrames = renderedFramesTmp;
1195 1 : droppedFrames = droppedFramesTmp;
1196 1 : returnValue = true;
1197 : }
1198 : else
1199 : {
1200 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
1201 : GST_ELEMENT_NAME(sink));
1202 : }
1203 2 : m_gstWrapper->gstStructureFree(stats);
1204 : }
1205 3 : m_gstWrapper->gstObjectUnref(sink);
1206 : }
1207 : else
1208 : {
1209 2 : RIALTO_SERVER_LOG_ERROR("Failed to get stats, sink is NULL");
1210 : }
1211 :
1212 5 : return returnValue;
1213 : }
1214 :
/**
 * @brief Creates a GstBuffer containing the media segment payload, attaching
 *        Rialto protection metadata when the segment is encrypted.
 *
 * For encrypted segments the key ID, initialisation vector and subsample map
 * are each copied into their own GstBuffer and bundled (with session ID, cipher
 * mode, encryption pattern and the decryption service) into a
 * GstRialtoProtectionData record attached to the output buffer. If attaching
 * the metadata fails, the auxiliary buffers are released here; otherwise
 * ownership passes to the metadata.
 *
 * @param mediaSegment the media segment to wrap.
 *
 * @return new GstBuffer with timestamp and duration set from the segment.
 */
GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
{
    GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
    m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());

    if (mediaSegment.isEncrypted())
    {
        // Copy key ID and IV into standalone buffers referenced by the metadata.
        GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
        m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());

        GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
        m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
                                    mediaSegment.getInitVector().size());
        GstBuffer *subsamples{nullptr};
        if (!mediaSegment.getSubSamples().empty())
        {
            // Serialise subsamples as (uint16 clear bytes, uint32 encrypted bytes)
            // pairs in big-endian order; the wrapped buffer takes ownership of the
            // raw allocation.
            auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
            guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
            GstByteWriter writer;
            m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);

            for (const auto &subSample : mediaSegment.getSubSamples())
            {
                m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
                m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
            }
            subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
        }

        uint32_t crypt = 0;
        uint32_t skip = 0;
        bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);

        GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
                                        static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
                                        mediaSegment.getInitWithLast15(),
                                        keyId,
                                        initVector,
                                        subsamples,
                                        mediaSegment.getCipherMode(),
                                        crypt,
                                        skip,
                                        encryptionPatternSet,
                                        m_context.decryptionService};

        if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
        {
            // Metadata did not take ownership; release the auxiliary buffers to
            // avoid leaking them. The payload buffer is still returned unprotected.
            RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
            if (keyId)
            {
                m_gstWrapper->gstBufferUnref(keyId);
            }
            if (initVector)
            {
                m_gstWrapper->gstBufferUnref(initVector);
            }
            if (subsamples)
            {
                m_gstWrapper->gstBufferUnref(subsamples);
            }
        }
    }

    GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
    GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
    return gstBuffer;
}
1282 :
1283 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
1284 : {
1285 4 : auto elem = m_context.streamInfo.find(mediaSource);
1286 4 : if (elem != m_context.streamInfo.end())
1287 : {
1288 2 : StreamInfo &streamInfo = elem->second;
1289 2 : streamInfo.isNeedDataPending = false;
1290 :
1291 : // Send new NeedMediaData if we still need it
1292 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
1293 : {
1294 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
1295 : }
1296 : }
1297 : else
1298 : {
1299 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
1300 : }
1301 4 : }
1302 :
/**
 * @brief Pushes all queued buffers for the given media type into its appsrc.
 *
 * Before pushing, an initial segment/sample is sent if required (subtitle
 * position for SUBTITLE, initial GstSample otherwise). After pushing, the
 * queue is cleared, any underflow is cancelled, the BUFFERED network state is
 * reported once all streams have pushed data, and a pending EOS is completed.
 *
 * @param mediaType the media source whose queued buffers are attached.
 */
void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
{
    auto elem = m_context.streamInfo.find(mediaType);
    if (elem != m_context.streamInfo.end())
    {
        StreamInfo &streamInfo = elem->second;
        // Nothing to do with no queued buffers or when the source has not asked for data.
        if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
        {
            return;
        }

        if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
        {
            setTextTrackPositionIfRequired(streamInfo.appSrc);
        }
        else
        {
            pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
        }
        if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
        {
            // This needs to be done before gstAppSrcPushBuffer() is
            // called because it can free the memory
            m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
        }

        // Hand the queued buffers over to the appsrc (which takes ownership).
        for (GstBuffer *buffer : streamInfo.buffers)
        {
            m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
        }
        streamInfo.buffers.clear();
        streamInfo.isDataPushed = true;

        // Report BUFFERED once: either this is the only stream, or every known
        // stream has pushed at least some data.
        const bool kIsSingle = m_context.streamInfo.size() == 1;
        bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
                                                 [](const auto &entry) { return entry.second.isDataPushed; });

        if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
        {
            m_context.bufferedNotificationSent = true;
            m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
            RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
        }
        cancelUnderflow(mediaType);

        // Complete an end-of-stream that was deferred until data arrived.
        const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
        if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
        {
            setEos(mediaType);
        }
    }
}
1355 :
1356 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
1357 : {
1358 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
1359 7 : if (elem != m_context.streamInfo.end())
1360 : {
1361 6 : StreamInfo &streamInfo = elem->second;
1362 :
1363 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
1364 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1365 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1366 :
1367 6 : if (rate != kInvalidRate)
1368 : {
1369 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
1370 : }
1371 :
1372 6 : if (channels != kInvalidChannels)
1373 : {
1374 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
1375 : }
1376 :
1377 6 : setCodecData(newCaps, codecData);
1378 :
1379 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1380 : {
1381 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1382 : }
1383 :
1384 6 : m_gstWrapper->gstCapsUnref(newCaps);
1385 6 : m_gstWrapper->gstCapsUnref(currentCaps);
1386 : }
1387 7 : }
1388 :
1389 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
1390 : const std::shared_ptr<CodecData> &codecData)
1391 : {
1392 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
1393 8 : if (elem != m_context.streamInfo.end())
1394 : {
1395 7 : StreamInfo &streamInfo = elem->second;
1396 :
1397 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1398 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1399 :
1400 7 : if (width > 0)
1401 : {
1402 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
1403 : }
1404 :
1405 7 : if (height > 0)
1406 : {
1407 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
1408 : }
1409 :
1410 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
1411 : {
1412 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
1413 : frameRate.denominator, NULL);
1414 : }
1415 :
1416 7 : setCodecData(newCaps, codecData);
1417 :
1418 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1419 : {
1420 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1421 : }
1422 :
1423 7 : m_gstWrapper->gstCapsUnref(currentCaps);
1424 7 : m_gstWrapper->gstCapsUnref(newCaps);
1425 : }
1426 8 : }
1427 :
1428 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
1429 : {
1430 5 : if (clippingStart || clippingEnd)
1431 : {
1432 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
1433 : {
1434 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
1435 : clippingStart, clippingEnd);
1436 : }
1437 : else
1438 : {
1439 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
1440 : buffer, clippingStart, clippingEnd);
1441 : }
1442 : }
1443 5 : }
1444 :
1445 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
1446 : {
1447 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
1448 : {
1449 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
1450 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
1451 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
1452 7 : m_gstWrapper->gstBufferUnref(buf);
1453 7 : return true;
1454 : }
1455 6 : if (codecData && CodecDataType::STRING == codecData->type)
1456 : {
1457 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
1458 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
1459 2 : return true;
1460 : }
1461 4 : return false;
1462 : }
1463 :
/**
 * @brief Pushes initial GstSamples (segments) for a source if any are queued.
 *
 * For every queued initial position a new time-format segment is seeked to
 * [position, stopPosition] at the current playback rate and pushed as a
 * caps+segment-only sample. On success the last segment becomes the source's
 * current position; the queued entry is always erased before returning.
 *
 * @param source  the appsrc element the samples are pushed into.
 * @param typeStr human-readable media type name used in log messages.
 */
void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
{
    auto initialPosition = m_context.initialPositions.find(source);
    if (m_context.initialPositions.end() == initialPosition)
    {
        // Sending initial sample not needed
        return;
    }
    // GstAppSrc does not replace segment, if it's the same as previous one.
    // It causes problems with position reporting in amlogic devices, so we need to push
    // two segments with different reset time value.
    pushAdditionalSegmentIfRequired(source);

    for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
    {
        // resetTime requests a flushing seek; otherwise keep the running time.
        GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
        RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
        GstSegment *segment{m_gstWrapper->gstSegmentNew()};
        m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
        if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
                                            GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
        {
            // Abort on seek failure; the remaining queued segments are dropped too.
            RIALTO_SERVER_LOG_WARN("Segment seek failed.");
            m_gstWrapper->gstSegmentFree(segment);
            m_context.initialPositions.erase(initialPosition);
            return;
        }
        segment->applied_rate = appliedRate;
        RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
                              "], rate: %f, appliedRate %f, reset_time: %d\n",
                              typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
                              segment->rate, segment->applied_rate, resetTime);

        GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
        // We can't pass buffer in GstSample, because implementation of gst_app_src_push_sample
        // uses gst_buffer_copy, which loses RialtoProtectionMeta (that causes problems with EME
        // for first frame).
        GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
        m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
        m_gstWrapper->gstSampleUnref(sample);
        m_gstWrapper->gstCapsUnref(currentCaps);

        m_gstWrapper->gstSegmentFree(segment);
    }
    // Remember the last pushed segment as the source's current position.
    m_context.currentPosition[source] = initialPosition->second.back();
    m_context.initialPositions.erase(initialPosition);
    return;
}
1512 :
1513 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
1514 : {
1515 5 : auto currentPosition = m_context.currentPosition.find(source);
1516 5 : if (m_context.currentPosition.end() == currentPosition)
1517 : {
1518 4 : return;
1519 : }
1520 1 : auto initialPosition = m_context.initialPositions.find(source);
1521 1 : if (m_context.initialPositions.end() == initialPosition)
1522 : {
1523 0 : return;
1524 : }
1525 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1526 1 : currentPosition->second == initialPosition->second.back())
1527 : {
1528 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1529 1 : SegmentData additionalSegment = initialPosition->second.back();
1530 1 : additionalSegment.resetTime = false;
1531 1 : initialPosition->second.push_back(additionalSegment);
1532 : }
1533 : }
1534 :
1535 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1536 : {
1537 2 : auto initialPosition = m_context.initialPositions.find(source);
1538 2 : if (m_context.initialPositions.end() == initialPosition)
1539 : {
1540 : // Sending initial sample not needed
1541 1 : return;
1542 : }
1543 :
1544 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1545 : GST_TIME_ARGS(initialPosition->second.back().position));
1546 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1547 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1548 :
1549 1 : m_context.initialPositions.erase(initialPosition);
1550 : }
1551 :
1552 9 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1553 : {
1554 9 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1555 : {
1556 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1557 1 : return false;
1558 : }
1559 8 : if (source->getMimeType().empty())
1560 : {
1561 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1562 1 : return false;
1563 : }
1564 7 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1565 7 : if (!audioAttributes)
1566 : {
1567 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1568 1 : return false;
1569 : }
1570 :
1571 6 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1572 6 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1573 6 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1574 6 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1575 :
1576 6 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1577 : {
1578 5 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1579 :
1580 5 : GstElement *sink = getSink(MediaSourceType::AUDIO);
1581 5 : if (!sink)
1582 : {
1583 0 : RIALTO_SERVER_LOG_ERROR("Failed to get audio sink");
1584 0 : if (caps)
1585 0 : m_gstWrapper->gstCapsUnref(caps);
1586 0 : if (oldCaps)
1587 0 : m_gstWrapper->gstCapsUnref(oldCaps);
1588 0 : return false;
1589 : }
1590 5 : std::string sinkName = GST_ELEMENT_NAME(sink);
1591 5 : m_gstWrapper->gstObjectUnref(sink);
1592 :
1593 5 : int sampleAttributes{
1594 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
1595 5 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1596 5 : unsigned int ui32Delay{0}; // output param
1597 5 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1598 : // currentDispPts in rdk_gstreamer_utils function stub
1599 5 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1600 5 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1601 5 : std::string oldCapsStr = std::string(oldCapsCStr);
1602 5 : m_glibWrapper->gFree(oldCapsCStr);
1603 5 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1604 5 : bool svpEnabled{true}; // assume always true
1605 5 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1606 :
1607 5 : bool result = false;
1608 5 : if (m_glibWrapper->gStrHasPrefix(sinkName.c_str(), "amlhalasink"))
1609 : {
1610 : // due to problems audio codec change in prerolling, temporarily moved the code from rdk gstreamer utils to
1611 : // Rialto and applied fixes
1612 2 : result = performAudioTrackCodecChannelSwitch(&sampleAttributes, &(*audioAttributes), &status, &ui32Delay,
1613 : &audioChangeTargetPts, ¤tDispPts, &audioChangeStage,
1614 2 : &caps, &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1615 : }
1616 : else
1617 : {
1618 6 : result = m_rdkGstreamerUtilsWrapper->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup,
1619 : &sampleAttributes,
1620 3 : &(*audioAttributes), &status,
1621 : &ui32Delay, &audioChangeTargetPts,
1622 : ¤tDispPts, &audioChangeStage,
1623 : &caps, &audioAac, svpEnabled,
1624 3 : GST_ELEMENT(appSrc), &retVal);
1625 : }
1626 :
1627 5 : if (!result || !retVal)
1628 : {
1629 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1630 : }
1631 5 : }
1632 : else
1633 : {
1634 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1635 : }
1636 :
1637 6 : m_context.lastAudioSampleTimestamps = currentDispPts;
1638 6 : if (caps)
1639 6 : m_gstWrapper->gstCapsUnref(caps);
1640 6 : if (oldCaps)
1641 6 : m_gstWrapper->gstCapsUnref(oldCaps);
1642 :
1643 6 : return true;
1644 7 : }
1645 :
1646 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1647 : {
1648 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1649 : }
1650 :
1651 91 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1652 : {
1653 91 : if (m_workerThread)
1654 : {
1655 91 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1656 : }
1657 : }
1658 :
1659 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1660 : {
1661 1 : if (m_workerThread)
1662 : {
1663 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1664 : }
1665 : }
1666 :
1667 2 : void GstGenericPlayer::scheduleAudioUnderflow()
1668 : {
1669 2 : if (m_workerThread)
1670 : {
1671 2 : bool underflowEnabled = m_context.isPlaying;
1672 4 : m_workerThread->enqueueTask(
1673 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1674 : }
1675 2 : }
1676 :
1677 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1678 : {
1679 2 : if (m_workerThread)
1680 : {
1681 2 : bool underflowEnabled = m_context.isPlaying;
1682 4 : m_workerThread->enqueueTask(
1683 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1684 : }
1685 2 : }
1686 :
/// @brief Notifies that all sources have been attached; delegates synchronously
///        (no worker-thread hop, unlike the other schedule* methods).
void GstGenericPlayer::scheduleAllSourcesAttached()
{
    allSourcesAttached();
}
1691 :
1692 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1693 : {
1694 14 : auto elem = m_context.streamInfo.find(mediaSource);
1695 14 : if (elem != m_context.streamInfo.end())
1696 : {
1697 14 : StreamInfo &streamInfo = elem->second;
1698 14 : if (!streamInfo.underflowOccured)
1699 : {
1700 11 : return;
1701 : }
1702 :
1703 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1704 3 : streamInfo.underflowOccured = false;
1705 : }
1706 : }
1707 :
/**
 * @brief Requests a transition to GST_STATE_PLAYING.
 *
 * When no other state change is in flight the transition is performed directly on the
 * calling (main) thread for speed; otherwise it is queued on the worker thread behind
 * the in-flight change.
 *
 * @param[out] async set to true when the state change completes asynchronously
 *                   (always true for the queued path).
 */
void GstGenericPlayer::play(bool &async)
{
    if (0 == m_ongoingStateChangesNumber)
    {
        // Operation called on main thread, because PAUSED->PLAYING change is synchronous and needs to be done fast.
        //
        // m_context.pipeline can be used, because it's modified only in GstGenericPlayer
        // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
        ++m_ongoingStateChangesNumber;
        async = (changePipelineState(GST_STATE_PLAYING) == GST_STATE_CHANGE_ASYNC);
        RIALTO_SERVER_LOG_MIL("State change to PLAYING requested");
    }
    else
    {
        // Another state change is pending; serialize behind it on the worker thread.
        // The counter is decremented by changePipelineState() when the queued task runs.
        ++m_ongoingStateChangesNumber;
        async = true;
        if (m_workerThread)
        {
            m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
        }
    }
}
1730 :
1731 2 : void GstGenericPlayer::pause()
1732 : {
1733 2 : ++m_ongoingStateChangesNumber;
1734 2 : if (m_workerThread)
1735 : {
1736 2 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1737 : }
1738 : }
1739 :
1740 1 : void GstGenericPlayer::stop()
1741 : {
1742 1 : ++m_ongoingStateChangesNumber;
1743 1 : if (m_workerThread)
1744 : {
1745 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1746 : }
1747 : }
1748 :
/**
 * @brief Performs the actual GStreamer pipeline state change and balances the
 *        m_ongoingStateChangesNumber counter incremented by play()/pause()/stop().
 *
 * On failure (null pipeline or GStreamer error) the client is notified with
 * PlaybackState::FAILURE.
 *
 * @param newState the target GStreamer state
 * @return the GstStateChangeReturn from GStreamer, or GST_STATE_CHANGE_FAILURE when
 *         the pipeline is null.
 */
GstStateChangeReturn GstGenericPlayer::changePipelineState(GstState newState)
{
    if (!m_context.pipeline)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
        // Balance the increment done by the caller even on the early-out path.
        --m_ongoingStateChangesNumber;
        return GST_STATE_CHANGE_FAILURE;
    }
    // Let the flush-on-preroll controller know which state we are heading to.
    m_context.flushOnPrerollController->setTargetState(newState);
    const GstStateChangeReturn result{m_gstWrapper->gstElementSetState(m_context.pipeline, newState)};
    if (result == GST_STATE_CHANGE_FAILURE)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
    }
    --m_ongoingStateChangesNumber;
    return result;
}
1770 :
/**
 * @brief Returns the current playback position in nanoseconds, or -1 on failure.
 *
 * The position is not reported while @p element is still prerolling (below PAUSED, or
 * an async transition into PAUSED is in progress), because the value would be invalid.
 *
 * @param element the element whose state gates the query (typically the pipeline)
 * @return position in GST_FORMAT_TIME units, or -1 when unavailable.
 */
int64_t GstGenericPlayer::getPosition(GstElement *element)
{
    if (!element)
    {
        RIALTO_SERVER_LOG_WARN("Element is null");
        return -1;
    }

    // Hold the element's state lock so state/return/next are read consistently.
    m_gstWrapper->gstStateLock(element);

    if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
        (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
         m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
    {
        RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
                               m_gstWrapper->gstElementStateChangeReturnGetName(
                                   m_gstWrapper->gstElementGetStateReturn(element)),
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));

        m_gstWrapper->gstStateUnlock(element);
        return -1;
    }
    m_gstWrapper->gstStateUnlock(element);

    gint64 position = -1;
    // NOTE(review): the state check above is done on 'element' but the query always targets
    // m_context.pipeline - presumably intentional since callers pass the pipeline; confirm.
    if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
    {
        RIALTO_SERVER_LOG_WARN("Failed to query position");
        return -1;
    }

    return position;
}
1805 :
1806 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1807 : {
1808 1 : if (m_workerThread)
1809 : {
1810 2 : m_workerThread->enqueueTask(
1811 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1812 : }
1813 1 : }
1814 :
1815 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1816 : {
1817 1 : if (m_workerThread)
1818 : {
1819 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1820 : }
1821 : }
1822 :
1823 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1824 : {
1825 4 : bool result = false;
1826 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1827 4 : if (videoSink)
1828 : {
1829 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1830 : {
1831 : std::string rect =
1832 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1833 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1834 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1835 2 : m_context.pendingGeometry.clear();
1836 2 : result = true;
1837 : }
1838 : else
1839 : {
1840 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1841 : }
1842 3 : m_gstWrapper->gstObjectUnref(videoSink);
1843 : }
1844 : else
1845 : {
1846 1 : RIALTO_SERVER_LOG_ERROR("Failed to set video rectangle, sink is NULL");
1847 : }
1848 :
1849 4 : return result;
1850 : }
1851 :
1852 3 : bool GstGenericPlayer::setImmediateOutput()
1853 : {
1854 3 : bool result{false};
1855 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1856 : {
1857 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1858 3 : if (sink)
1859 : {
1860 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1861 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1862 :
1863 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1864 : {
1865 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1866 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1867 1 : result = true;
1868 : }
1869 : else
1870 : {
1871 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1872 : }
1873 2 : m_context.pendingImmediateOutputForVideo.reset();
1874 2 : m_gstWrapper->gstObjectUnref(sink);
1875 : }
1876 : else
1877 : {
1878 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1879 : }
1880 : }
1881 3 : return result;
1882 : }
1883 :
/**
 * @brief Applies the pending "report-decode-errors" flag to the video decoder.
 *
 * The pending value is read under m_context.propertyMutex, but the GObject property is
 * set outside the lock; the pending value is cleared (again under the lock) only after
 * a decoder was found, so the request stays queued while the decoder is unavailable.
 *
 * @retval true when the property was set, false otherwise.
 */
bool GstGenericPlayer::setReportDecodeErrors()
{
    bool result{false};
    bool reportDecodeErrors{false};

    {
        // Snapshot the pending value; do not hold the lock across the GObject calls below.
        std::unique_lock lock{m_context.propertyMutex};
        if (!m_context.pendingReportDecodeErrorsForVideo.has_value())
        {
            return false;
        }
        reportDecodeErrors = m_context.pendingReportDecodeErrorsForVideo.value();
    }

    GstElement *decoder = getDecoder(MediaSourceType::VIDEO);
    if (decoder)
    {
        RIALTO_SERVER_LOG_DEBUG("Set report decode errors to %s", reportDecodeErrors ? "TRUE" : "FALSE");

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "report-decode-errors"))
        {
            gboolean reportDecodeErrorsGboolean{reportDecodeErrors ? TRUE : FALSE};
            m_glibWrapper->gObjectSet(decoder, "report-decode-errors", reportDecodeErrorsGboolean, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set report-decode-errors property on decoder '%s'",
                                    GST_ELEMENT_NAME(decoder));
        }

        m_gstWrapper->gstObjectUnref(decoder);

        {
            // Clear the request now that a decoder has consumed it.
            std::unique_lock lock{m_context.propertyMutex};
            m_context.pendingReportDecodeErrorsForVideo.reset();
        }
    }
    else
    {
        RIALTO_SERVER_LOG_DEBUG("Pending report-decode-errors, decoder is NULL");
    }
    return result;
}
1928 :
1929 4 : bool GstGenericPlayer::setShowVideoWindow()
1930 : {
1931 4 : if (!m_context.pendingShowVideoWindow.has_value())
1932 : {
1933 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1934 1 : return false;
1935 : }
1936 :
1937 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1938 3 : if (!videoSink)
1939 : {
1940 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1941 1 : return false;
1942 : }
1943 2 : bool result{false};
1944 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1945 : {
1946 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1947 1 : result = true;
1948 : }
1949 : else
1950 : {
1951 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1952 : }
1953 2 : m_context.pendingShowVideoWindow.reset();
1954 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1955 2 : return result;
1956 : }
1957 :
1958 4 : bool GstGenericPlayer::setLowLatency()
1959 : {
1960 4 : bool result{false};
1961 4 : if (m_context.pendingLowLatency.has_value())
1962 : {
1963 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1964 4 : if (sink)
1965 : {
1966 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1967 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1968 :
1969 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1970 : {
1971 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1972 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1973 2 : result = true;
1974 : }
1975 : else
1976 : {
1977 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1978 : }
1979 3 : m_context.pendingLowLatency.reset();
1980 3 : m_gstWrapper->gstObjectUnref(sink);
1981 : }
1982 : else
1983 : {
1984 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1985 : }
1986 : }
1987 4 : return result;
1988 : }
1989 :
1990 3 : bool GstGenericPlayer::setSync()
1991 : {
1992 3 : bool result{false};
1993 3 : if (m_context.pendingSync.has_value())
1994 : {
1995 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1996 3 : if (sink)
1997 : {
1998 2 : bool sync{m_context.pendingSync.value()};
1999 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
2000 :
2001 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
2002 : {
2003 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
2004 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
2005 1 : result = true;
2006 : }
2007 : else
2008 : {
2009 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
2010 : }
2011 2 : m_context.pendingSync.reset();
2012 2 : m_gstWrapper->gstObjectUnref(sink);
2013 : }
2014 : else
2015 : {
2016 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
2017 : }
2018 : }
2019 3 : return result;
2020 : }
2021 :
2022 3 : bool GstGenericPlayer::setSyncOff()
2023 : {
2024 3 : bool result{false};
2025 3 : if (m_context.pendingSyncOff.has_value())
2026 : {
2027 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2028 3 : if (decoder)
2029 : {
2030 2 : bool syncOff{m_context.pendingSyncOff.value()};
2031 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
2032 :
2033 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
2034 : {
2035 1 : gboolean syncOffGboolean{decoder ? TRUE : FALSE};
2036 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
2037 1 : result = true;
2038 : }
2039 : else
2040 : {
2041 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
2042 : }
2043 2 : m_context.pendingSyncOff.reset();
2044 2 : m_gstWrapper->gstObjectUnref(decoder);
2045 : }
2046 : else
2047 : {
2048 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
2049 : }
2050 : }
2051 3 : return result;
2052 : }
2053 :
/**
 * @brief Applies the pending stream sync mode for the given source type.
 *
 * For AUDIO the value is written to the decoder's "stream-sync-mode" (gint) property;
 * for VIDEO it is written to the parser's "syncmode-streaming" (gboolean) property.
 * The pending value is read under m_context.propertyMutex and erased (again under the
 * lock) only after the target element was found, so the request stays queued while the
 * element is unavailable.
 *
 * @param type AUDIO or VIDEO
 * @retval true when the property was set, false otherwise.
 */
bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
{
    bool result{false};
    int32_t streamSyncMode{0};
    {
        // Snapshot the pending value; do not hold the lock across the GObject calls below.
        std::unique_lock lock{m_context.propertyMutex};
        if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
        {
            return false;
        }
        streamSyncMode = m_context.pendingStreamSyncMode[type];
    }
    if (MediaSourceType::AUDIO == type)
    {
        GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
        if (!decoder)
        {
            RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
            return false;
        }

        RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
        {
            gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
            m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Consume the request under the lock (held to end of this branch's scope).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    else if (MediaSourceType::VIDEO == type)
    {
        GstElement *parser = getParser(MediaSourceType::VIDEO);
        if (!parser)
        {
            RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
            return false;
        }

        gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
        RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
        {
            m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
        }
        m_gstWrapper->gstObjectUnref(parser);
        // Consume the request under the lock (held to end of this branch's scope).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    return result;
}
2118 :
/**
 * @brief Renders a single prerolled video frame while paused.
 *
 * Toggles the sink's "frame-step-on-preroll" property around a one-buffer GST step
 * event. The pending flag stays queued while the sink is unavailable and is cleared
 * once a sink was found (whether or not the property exists).
 *
 * @retval true when the frame step was issued, false otherwise.
 */
bool GstGenericPlayer::setRenderFrame()
{
    bool result{false};
    if (m_context.pendingRenderFrame)
    {
        static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
        GstElement *sink{getSink(MediaSourceType::VIDEO)};
        if (sink)
        {
            if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
            {
                RIALTO_SERVER_LOG_INFO("Rendering preroll");

                // Enable step-on-preroll, step exactly one buffer, then disable it again.
                m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
                m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
                                                                                      false));
                m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
                result = true;
            }
            else
            {
                RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
            }
            m_gstWrapper->gstObjectUnref(sink);
            m_context.pendingRenderFrame = false;
        }
        else
        {
            RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
        }
    }
    return result;
}
2152 :
/**
 * @brief Applies the pending buffering limit (ms) to the audio decoder's
 *        "limit-buffering-ms" property.
 *
 * The pending value is read under m_context.propertyMutex and cleared (again under the
 * lock) only after a decoder was found, so the request stays queued while the decoder
 * is unavailable.
 *
 * @retval true when the property was set, false otherwise.
 */
bool GstGenericPlayer::setBufferingLimit()
{
    bool result{false};
    guint bufferingLimit{0};
    {
        // Snapshot the pending value; do not hold the lock across the GObject calls below.
        std::unique_lock lock{m_context.propertyMutex};
        if (!m_context.pendingBufferingLimit.has_value())
        {
            return false;
        }
        bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
    }

    GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
    if (decoder)
    {
        RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
        {
            m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
                                    GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Consume the request under the lock (held to end of this scope).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingBufferingLimit.reset();
    }
    else
    {
        RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
    }
    return result;
}
2191 :
2192 2 : bool GstGenericPlayer::setUseBuffering()
2193 : {
2194 2 : std::unique_lock lock{m_context.propertyMutex};
2195 2 : if (m_context.pendingUseBuffering.has_value())
2196 : {
2197 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2198 : {
2199 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
2200 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
2201 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
2202 : useBufferingGboolean, nullptr);
2203 1 : m_context.pendingUseBuffering.reset();
2204 1 : return true;
2205 : }
2206 : else
2207 : {
2208 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
2209 : }
2210 : }
2211 1 : return false;
2212 2 : }
2213 :
/**
 * @brief Creates a westerossink configured for secondary video (res-usage = 0) and
 *        installs it as the pipeline's video sink.
 *
 * Succeeds trivially (returns true) when no westerossink factory exists on the
 * platform. Returns false when the sink cannot be created or lacks the "res-usage"
 * property.
 *
 * @retval true on success or when westerossink is absent, false on failure.
 */
bool GstGenericPlayer::setWesterossinkSecondaryVideo()
{
    bool result = false;
    GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
    if (factory)
    {
        GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
        if (videoSink)
        {
            if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
            {
                m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
                // NOTE(review): on this path the sink is not unreffed here - presumably its
                // floating reference is taken over by the pipeline's "video-sink" property; confirm.
                m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
                result = true;
            }
            else
            {
                RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
                // Not handed to the pipeline - drop our reference.
                m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
            }
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
        }

        m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
    }
    else
    {
        // No westeros sink
        result = true;
    }

    return result;
}
2250 :
2251 8 : bool GstGenericPlayer::setErmContext()
2252 : {
2253 8 : bool result = false;
2254 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
2255 8 : if (context)
2256 : {
2257 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
2258 6 : if (contextStructure)
2259 : {
2260 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
2261 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
2262 5 : result = true;
2263 : }
2264 : else
2265 : {
2266 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
2267 : }
2268 6 : m_gstWrapper->gstContextUnref(context);
2269 : }
2270 : else
2271 : {
2272 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
2273 : }
2274 :
2275 8 : return result;
2276 : }
2277 :
/**
 * @brief Starts the periodic timer (kPositionReportTimerMs) that reports position and
 *        checks for audio underflow. No-op when the timer is already running.
 *
 * The lambda captures 'this'; the timer is owned by this object and is cancelled in
 * stopPositionReportingAndCheckAudioUnderflowTimer, so the capture does not outlive
 * the player.
 */
void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
{
    if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
    {
        return;
    }

    m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
        kPositionReportTimerMs,
        [this]()
        {
            // Work is delegated to the worker thread, keeping the timer callback short.
            if (m_workerThread)
            {
                m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
                m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
            }
        },
        firebolt::rialto::common::TimerType::PERIODIC);
}
2297 :
2298 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
2299 : {
2300 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
2301 : {
2302 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
2303 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
2304 : }
2305 4 : }
2306 :
/**
 * @brief Starts the periodic playback-info notification timer (32 ms). Fires one
 *        notification immediately before arming the timer. No-op when already running.
 */
void GstGenericPlayer::startNotifyPlaybackInfoTimer()
{
    static constexpr std::chrono::milliseconds kPlaybackInfoTimerMs{32};
    if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
    {
        return;
    }

    // Emit an initial notification straight away so clients do not wait a full period.
    notifyPlaybackInfo();

    m_playbackInfoTimer =
        m_timerFactory
            ->createTimer(kPlaybackInfoTimerMs, [this]() { notifyPlaybackInfo(); }, firebolt::rialto::common::TimerType::PERIODIC);
}
2321 :
2322 3 : void GstGenericPlayer::stopNotifyPlaybackInfoTimer()
2323 : {
2324 3 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
2325 : {
2326 1 : m_playbackInfoTimer->cancel();
2327 1 : m_playbackInfoTimer.reset();
2328 : }
2329 3 : }
2330 :
/**
 * @brief Starts the periodic timer (kSubtitleClockResyncInterval) that re-synchronises
 *        the subtitle clock via a worker-thread task. No-op when already running.
 */
void GstGenericPlayer::startSubtitleClockResyncTimer()
{
    if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
    {
        return;
    }

    m_subtitleClockResyncTimer = m_timerFactory->createTimer(
        kSubtitleClockResyncInterval,
        [this]()
        {
            // Delegate to the worker thread, keeping the timer callback short.
            if (m_workerThread)
            {
                m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
            }
        },
        firebolt::rialto::common::TimerType::PERIODIC);
}
2349 :
2350 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
2351 : {
2352 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
2353 : {
2354 0 : m_subtitleClockResyncTimer->cancel();
2355 0 : m_subtitleClockResyncTimer.reset();
2356 : }
2357 : }
2358 :
2359 2 : void GstGenericPlayer::stopWorkerThread()
2360 : {
2361 2 : if (m_workerThread)
2362 : {
2363 2 : m_workerThread->stop();
2364 : }
2365 : }
2366 :
/// @brief Applies the playback rate that was queued while a rate change was not possible.
void GstGenericPlayer::setPendingPlaybackRate()
{
    RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
    setPlaybackRate(m_context.pendingPlaybackRate);
}
2372 :
2373 1 : void GstGenericPlayer::renderFrame()
2374 : {
2375 1 : if (m_workerThread)
2376 : {
2377 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
2378 : }
2379 : }
2380 :
2381 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
2382 : {
2383 18 : if (m_workerThread)
2384 : {
2385 36 : m_workerThread->enqueueTask(
2386 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
2387 : }
2388 18 : }
2389 :
/**
 * @brief Reads the current audio volume.
 *
 * When audio fade is enabled and the audio sink exposes a "fade-volume"
 * property, that property takes precedence over the pipeline's stream
 * volume (see the NOTE below for the observed semantics).
 *
 * @param[out] currentVolume : current volume level, linear scale 0.0-1.0.
 * @retval true on success, false when no pipeline is available.
 */
bool GstGenericPlayer::getVolume(double &currentVolume)
{
    // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
    // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
    if (!m_context.pipeline)
    {
        return false;
    }

    // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
    // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
    // The code has been written to be backwardly compatible on platforms that don't have this property.
    // The observed behaviour was:
    // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
    //   to find volume in the pipeline works and is used.
    // - if the returned fade volume is positive then audio-fade is active. In this case the returned fade volume
    //   directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
    //   meaningless and doesn't contribute in this case).
    GstElement *sink{getSink(MediaSourceType::AUDIO)};
    if (m_context.audioFadeEnabled && sink &&
        m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
    {
        // Sentinel: any negative value means "fade not active" (see NOTE above).
        gint fadeVolume{-100};
        m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
        if (fadeVolume < 0)
        {
            currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                                   GST_STREAM_VOLUME_FORMAT_LINEAR);
            RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
        }
        else
        {
            // fade-volume reports 0..100; convert to the linear 0.0..1.0 scale.
            currentVolume = static_cast<double>(fadeVolume) / 100.0;
            RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
        }
    }
    else
    {
        currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                               GST_STREAM_VOLUME_FORMAT_LINEAR);
        RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
    }

    // getSink() returned a new reference; release it.
    if (sink)
        m_gstWrapper->gstObjectUnref(sink);

    return true;
}
2438 :
2439 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
2440 : {
2441 1 : if (m_workerThread)
2442 : {
2443 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
2444 : }
2445 : }
2446 :
2447 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
2448 : {
2449 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
2450 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
2451 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
2452 : {
2453 2 : if (!m_context.subtitleSink)
2454 : {
2455 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2456 1 : return false;
2457 : }
2458 1 : gboolean muteValue{FALSE};
2459 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
2460 1 : mute = muteValue;
2461 : }
2462 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
2463 : {
2464 2 : if (!m_context.pipeline)
2465 : {
2466 1 : return false;
2467 : }
2468 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
2469 : }
2470 : else
2471 : {
2472 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
2473 1 : return false;
2474 : }
2475 :
2476 2 : return true;
2477 : }
2478 :
2479 2 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
2480 : {
2481 2 : GstElement *sink = getSink(mediaSourceType);
2482 2 : if (!sink)
2483 : {
2484 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
2485 0 : return true; // Our sinks are async by default
2486 : }
2487 2 : gboolean returnValue{TRUE};
2488 2 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
2489 2 : m_gstWrapper->gstObjectUnref(sink);
2490 2 : return returnValue == TRUE;
2491 : }
2492 :
2493 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
2494 : {
2495 1 : if (m_workerThread)
2496 : {
2497 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
2498 : }
2499 : }
2500 :
2501 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
2502 : {
2503 3 : if (!m_context.subtitleSink)
2504 : {
2505 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2506 1 : return false;
2507 : }
2508 :
2509 2 : gchar *identifier = nullptr;
2510 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
2511 :
2512 2 : if (identifier)
2513 : {
2514 1 : textTrackIdentifier = identifier;
2515 1 : m_glibWrapper->gFree(identifier);
2516 1 : return true;
2517 : }
2518 : else
2519 : {
2520 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
2521 1 : return false;
2522 : }
2523 : }
2524 :
2525 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
2526 : {
2527 1 : if (m_workerThread)
2528 : {
2529 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
2530 : }
2531 1 : return true;
2532 : }
2533 :
2534 1 : bool GstGenericPlayer::setSync(bool sync)
2535 : {
2536 1 : if (m_workerThread)
2537 : {
2538 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
2539 : }
2540 1 : return true;
2541 : }
2542 :
2543 4 : bool GstGenericPlayer::getSync(bool &sync)
2544 : {
2545 4 : bool returnValue{false};
2546 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
2547 4 : if (sink)
2548 : {
2549 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
2550 : {
2551 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
2552 1 : returnValue = true;
2553 : }
2554 : else
2555 : {
2556 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
2557 : }
2558 2 : m_gstWrapper->gstObjectUnref(sink);
2559 : }
2560 2 : else if (m_context.pendingSync.has_value())
2561 : {
2562 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2563 1 : sync = m_context.pendingSync.value();
2564 1 : returnValue = true;
2565 : }
2566 : else
2567 : {
2568 : // We dont know the default setting on the sync, so return failure here
2569 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
2570 : }
2571 :
2572 4 : return returnValue;
2573 : }
2574 :
2575 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
2576 : {
2577 1 : if (m_workerThread)
2578 : {
2579 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
2580 : }
2581 1 : return true;
2582 : }
2583 :
2584 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
2585 : {
2586 1 : if (m_workerThread)
2587 : {
2588 2 : m_workerThread->enqueueTask(
2589 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
2590 : }
2591 1 : return true;
2592 : }
2593 :
2594 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
2595 : {
2596 5 : bool returnValue{false};
2597 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2598 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
2599 : {
2600 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
2601 2 : returnValue = true;
2602 : }
2603 : else
2604 : {
2605 3 : std::unique_lock lock{m_context.propertyMutex};
2606 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
2607 : {
2608 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2609 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
2610 1 : returnValue = true;
2611 : }
2612 : else
2613 : {
2614 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
2615 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2616 : }
2617 3 : }
2618 :
2619 5 : if (decoder)
2620 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2621 :
2622 5 : return returnValue;
2623 : }
2624 :
2625 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2626 : {
2627 1 : if (m_workerThread)
2628 : {
2629 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2630 : }
2631 : }
2632 :
2633 2 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2634 : {
2635 2 : if (m_workerThread)
2636 : {
2637 2 : async = isAsync(mediaSourceType);
2638 2 : m_flushWatcher->setFlushing(mediaSourceType, async);
2639 2 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime, async));
2640 : }
2641 : }
2642 :
2643 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2644 : double appliedRate, uint64_t stopPosition)
2645 : {
2646 1 : if (m_workerThread)
2647 : {
2648 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2649 : resetTime, appliedRate, stopPosition));
2650 : }
2651 : }
2652 :
2653 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2654 : {
2655 0 : if (m_workerThread)
2656 : {
2657 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2658 : }
2659 : }
2660 :
2661 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2662 : {
2663 1 : if (m_workerThread)
2664 : {
2665 2 : m_workerThread->enqueueTask(
2666 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2667 : }
2668 1 : }
2669 :
2670 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2671 : {
2672 1 : if (m_workerThread)
2673 : {
2674 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2675 : }
2676 : }
2677 :
2678 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2679 : {
2680 5 : bool returnValue{false};
2681 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2682 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2683 : {
2684 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2685 2 : returnValue = true;
2686 : }
2687 : else
2688 : {
2689 3 : std::unique_lock lock{m_context.propertyMutex};
2690 3 : if (m_context.pendingBufferingLimit.has_value())
2691 : {
2692 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2693 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2694 1 : returnValue = true;
2695 : }
2696 : else
2697 : {
2698 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2699 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2700 : }
2701 3 : }
2702 :
2703 5 : if (decoder)
2704 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2705 :
2706 5 : return returnValue;
2707 : }
2708 :
2709 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2710 : {
2711 1 : if (m_workerThread)
2712 : {
2713 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2714 : }
2715 : }
2716 :
2717 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2718 : {
2719 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2720 : {
2721 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2722 1 : return true;
2723 : }
2724 : else
2725 : {
2726 2 : std::unique_lock lock{m_context.propertyMutex};
2727 2 : if (m_context.pendingUseBuffering.has_value())
2728 : {
2729 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2730 1 : useBuffering = m_context.pendingUseBuffering.value();
2731 1 : return true;
2732 : }
2733 2 : }
2734 1 : return false;
2735 : }
2736 :
2737 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2738 : {
2739 1 : if (m_workerThread)
2740 : {
2741 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2742 : }
2743 : }
2744 :
2745 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2746 : {
2747 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2748 : }
2749 :
2750 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2751 : {
2752 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2753 : }
2754 :
2755 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2756 : {
2757 : // Only add children that are sinks
2758 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2759 : {
2760 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2761 :
2762 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2763 : {
2764 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is been overwritten");
2765 : }
2766 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2767 : }
2768 3 : }
2769 :
2770 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2771 : {
2772 : // Only add children that are sinks
2773 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2774 : {
2775 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2776 :
2777 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2778 : {
2779 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is been overwritten");
2780 : }
2781 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2782 : }
2783 3 : }
2784 :
2785 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2786 : {
2787 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2788 : {
2789 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2790 :
2791 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2792 : {
2793 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2794 1 : return;
2795 : }
2796 :
2797 2 : m_context.autoVideoChildSink = nullptr;
2798 : }
2799 : }
2800 :
2801 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2802 : {
2803 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2804 : {
2805 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2806 :
2807 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2808 : {
2809 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2810 1 : return;
2811 : }
2812 :
2813 2 : m_context.autoAudioChildSink = nullptr;
2814 : }
2815 : }
2816 :
2817 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2818 : {
2819 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2820 14 : if (!kTmpName)
2821 0 : return sink;
2822 :
2823 28 : const std::string kElementTypeName{kTmpName};
2824 14 : if (kElementTypeName == "GstAutoVideoSink")
2825 : {
2826 1 : if (!m_context.autoVideoChildSink)
2827 : {
2828 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2829 : }
2830 : else
2831 : {
2832 1 : return m_context.autoVideoChildSink;
2833 : }
2834 : }
2835 13 : return sink;
2836 14 : }
2837 :
2838 16 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2839 : {
2840 16 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2841 16 : if (!kTmpName)
2842 0 : return sink;
2843 :
2844 32 : const std::string kElementTypeName{kTmpName};
2845 16 : if (kElementTypeName == "GstAutoAudioSink")
2846 : {
2847 1 : if (!m_context.autoAudioChildSink)
2848 : {
2849 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2850 : }
2851 : else
2852 : {
2853 1 : return m_context.autoAudioChildSink;
2854 : }
2855 : }
2856 15 : return sink;
2857 16 : }
2858 :
2859 222 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2860 : {
2861 222 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2862 :
2863 222 : if (enableAudio)
2864 : {
2865 222 : flags |= getGstPlayFlag("audio");
2866 222 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2867 : }
2868 :
2869 222 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2870 : }
2871 :
2872 222 : bool GstGenericPlayer::shouldEnableNativeAudio()
2873 : {
2874 222 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2875 222 : if (factory)
2876 : {
2877 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2878 1 : return true;
2879 : }
2880 221 : return false;
2881 : }
2882 :
2883 : }; // namespace firebolt::rialto::server
|