Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <cstring>
23 : #include <ctime>
24 : #include <stdexcept>
25 :
26 : #include "FlushWatcher.h"
27 : #include "GstDispatcherThread.h"
28 : #include "GstGenericPlayer.h"
29 : #include "GstProtectionMetadata.h"
30 : #include "IGstTextTrackSinkFactory.h"
31 : #include "IMediaPipeline.h"
32 : #include "ITimer.h"
33 : #include "RialtoServerLogging.h"
34 : #include "TypeConverters.h"
35 : #include "Utils.h"
36 : #include "WorkerThread.h"
37 : #include "tasks/generic/GenericPlayerTaskFactory.h"
38 :
39 : namespace
40 : {
41 : /**
42 : * @brief Report position interval in ms.
43 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
44 : * whenever the session moves to another playback state.
45 : */
46 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
47 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
48 :
49 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
50 : {
51 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
52 2 : (lhs.stopPosition == rhs.stopPosition);
53 : }
54 : } // namespace
55 :
56 : namespace firebolt::rialto::server
57 : {
// Process-wide cached factory instance; observed (lock()ed) and refreshed by getFactory().
std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
59 :
60 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
61 : {
62 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
63 :
64 3 : if (!factory)
65 : {
66 : try
67 : {
68 3 : factory = std::make_shared<GstGenericPlayerFactory>();
69 : }
70 0 : catch (const std::exception &e)
71 : {
72 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
73 : }
74 :
75 3 : GstGenericPlayerFactory::m_factory = factory;
76 : }
77 :
78 3 : return factory;
79 : }
80 :
/**
 * @brief Creates a fully wired GstGenericPlayer instance.
 *
 * Resolves all wrapper dependencies (gst, glib, rdk_gstreamer_utils) before
 * constructing the player; any missing dependency aborts creation. Failures
 * are logged and a null pointer is returned rather than propagating the
 * exception to the caller.
 *
 * @param client                          : The player client to notify.
 * @param decryptionService               : Service used for decrypting protected content.
 * @param type                            : The media pipeline type to create.
 * @param videoRequirements               : Video decoder requirements (used to detect secondary video).
 * @param rdkGstreamerUtilsWrapperFactory : Factory for the rdk_gstreamer_utils wrapper.
 *
 * @retval The new player, or null on failure.
 */
std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
{
    std::unique_ptr<IGstGenericPlayer> gstPlayer;

    try
    {
        // Resolve every wrapper dependency up-front so construction either
        // gets a complete set or fails with a descriptive error.
        auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
        auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
        std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
        std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
        if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
        {
            throw std::runtime_error("Cannot create GstWrapper");
        }
        if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
        {
            throw std::runtime_error("Cannot create GlibWrapper");
        }
        if ((!rdkGstreamerUtilsWrapperFactory) ||
            (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
        {
            throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
        }
        gstPlayer = std::make_unique<
            GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
                              rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
                              IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
                              std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
                                                                         rdkGstreamerUtilsWrapper,
                                                                         IGstTextTrackSinkFactory::createFactory()),
                              std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
                              IGstProtectionMetadataHelperFactory::createFactory());
    }
    catch (const std::exception &e)
    {
        RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
    }

    return gstPlayer;
}
125 :
/**
 * @brief Constructs the generic player and builds its GStreamer pipeline.
 *
 * Validates every injected dependency, starts the worker thread, initialises
 * the MSE pipeline and finally starts the gst bus dispatcher thread. On any
 * failure a std::runtime_error is thrown; partially created resources
 * (worker thread, pipeline) are torn down first so the half-constructed
 * object does not leak threads or gst objects.
 *
 * A VideoRequirements below the primary-video minimum selects the secondary
 * video path (westerossink secondary video / ERM context).
 */
GstGenericPlayer::GstGenericPlayer(
    IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
    const VideoRequirements &videoRequirements,
    const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
    const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
    const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
    const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
    std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
    std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
    std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
    : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
      m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
      m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");

    // Block until global GStreamer initialisation has completed.
    gstInitialiser.waitForInitialisation();

    m_context.decryptionService = &decryptionService;

    if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
    {
        throw std::runtime_error("Cannot create GstSrc");
    }

    if (!timerFactory)
    {
        throw std::runtime_error("TimeFactory is invalid");
    }

    if ((!gstProtectionMetadataFactory) ||
        (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
    {
        throw std::runtime_error("Cannot create protection metadata wrapper");
    }

    // Ensure that rialtosrc has been initalised
    m_context.gstSrc->initSrc();

    // Start task thread
    if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
    {
        throw std::runtime_error("Failed to create the worker thread");
    }

    // Initialise pipeline
    switch (type)
    {
    case MediaType::MSE:
    {
        initMsePipeline();
        break;
    }
    default:
    {
        // Stop the worker thread before throwing so its tasks cannot touch
        // the partially destroyed object.
        resetWorkerThread();
        throw std::runtime_error("Media type not supported");
    }
    }

    // Check the video requirements for a limited video.
    // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
    // video in a dual video scenario.
    if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
    {
        RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
        bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
        bool ermContextResult = setErmContext();
        // Secondary video only fails if neither mechanism could be applied.
        if (!westerossinkSecondaryVideoResult && !ermContextResult)
        {
            resetWorkerThread();
            termPipeline();
            throw std::runtime_error("Could not set secondary video");
        }
    }
    else
    {
        RIALTO_SERVER_LOG_MIL("Primary video playback selected");
    }

    // Start listening to the gst bus last, once the pipeline is fully set up.
    m_gstDispatcherThread = gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline,
                                                                                 m_context.flushOnPrerollController,
                                                                                 m_gstWrapper);
}
211 :
/**
 * @brief Tears the player down in the reverse order of construction:
 * first stop dispatching gst bus events, then drain and stop the worker
 * thread, and finally destroy the pipeline itself.
 */
GstGenericPlayer::~GstGenericPlayer()
{
    RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
    m_gstDispatcherThread.reset();

    resetWorkerThread();

    termPipeline();
}
221 :
/**
 * @brief Builds the MSE playback pipeline around a playbin element.
 *
 * Connects the source/element setup callbacks, points playbin at the
 * rialto:// uri (handled by rialtosrc) and moves the pipeline to READY.
 * Failures to find playsink or to reach READY are logged but non-fatal.
 */
void GstGenericPlayer::initMsePipeline()
{
    // Make playbin
    m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
    // Set pipeline flags
    setPlaybinFlags(true);

    // Set callbacks
    m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
    m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
                                  G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);

    // Set uri
    m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);

    // Check playsink
    GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
    if (playsink)
    {
        m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
        // gstBinGetByName returned a new reference; release it after use.
        m_gstWrapper->gstObjectUnref(playsink);
    }
    else
    {
        GST_WARNING("No playsink ?!?!?");
    }
    if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
    {
        GST_WARNING("Failed to set pipeline to READY state");
    }
    RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
}
255 :
/**
 * @brief Stops and destroys the worker thread.
 *
 * Enqueues a shutdown task so queued work is processed first, joins the
 * thread and then releases it. Must not be called twice without a new
 * worker thread being created in between (m_workerThread is dereferenced
 * unconditionally).
 */
void GstGenericPlayer::resetWorkerThread()
{
    // Shutdown task thread
    m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
    m_workerThread->join();
    m_workerThread.reset();
}
263 :
264 215 : void GstGenericPlayer::termPipeline()
265 : {
266 215 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
267 : {
268 0 : m_finishSourceSetupTimer->cancel();
269 : }
270 :
271 215 : m_finishSourceSetupTimer.reset();
272 :
273 266 : for (auto &elem : m_context.streamInfo)
274 : {
275 51 : StreamInfo &streamInfo = elem.second;
276 53 : for (auto &buffer : streamInfo.buffers)
277 : {
278 2 : m_gstWrapper->gstBufferUnref(buffer);
279 : }
280 :
281 51 : streamInfo.buffers.clear();
282 : }
283 :
284 215 : m_taskFactory->createStop(m_context, *this)->execute();
285 215 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
286 215 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
287 215 : m_gstWrapper->gstObjectUnref(bus);
288 :
289 215 : if (m_context.source)
290 : {
291 1 : m_gstWrapper->gstObjectUnref(m_context.source);
292 : }
293 215 : if (m_context.subtitleSink)
294 : {
295 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
296 4 : m_context.subtitleSink = nullptr;
297 : }
298 :
299 215 : if (m_context.videoSink)
300 : {
301 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
302 0 : m_context.videoSink = nullptr;
303 : }
304 :
305 : // Delete the pipeline
306 215 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
307 :
308 215 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
309 : }
310 :
/**
 * @brief Looks up the numeric value of a GstPlayFlags flag by its nick.
 *
 * @param nick : The flag's nick name, e.g. "audio", "video", "text".
 * @retval The flag value, or 0 if the nick is unknown.
 *
 * NOTE(review): the GFlagsClass reference obtained via gTypeClassRef is not
 * released here — presumably acceptable because the class data lives for the
 * process lifetime; confirm against the glib wrapper's contract.
 */
unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
{
    GFlagsClass *flagsClass =
        static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
    GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
    return flag ? flag->value : 0;
}
318 :
319 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
320 : {
321 1 : self->m_gstWrapper->gstObjectRef(source);
322 1 : if (self->m_workerThread)
323 : {
324 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
325 : }
326 : }
327 :
328 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
329 : {
330 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
331 1 : self->m_gstWrapper->gstObjectRef(element);
332 1 : if (self->m_workerThread)
333 : {
334 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
335 : }
336 : }
337 :
338 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
339 : {
340 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
341 1 : if (self->m_workerThread)
342 : {
343 2 : self->m_workerThread->enqueueTask(
344 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
345 : }
346 1 : }
347 :
348 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
349 : {
350 1 : if (m_workerThread)
351 : {
352 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
353 : }
354 : }
355 :
356 2 : void GstGenericPlayer::allSourcesAttached()
357 : {
358 2 : if (m_workerThread)
359 : {
360 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
361 : }
362 : }
363 :
364 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
365 : {
366 1 : if (m_workerThread)
367 : {
368 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
369 : }
370 : }
371 :
372 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
373 : {
374 1 : if (m_workerThread)
375 : {
376 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
377 : }
378 : }
379 :
380 1 : void GstGenericPlayer::setPosition(std::int64_t position)
381 : {
382 1 : if (m_workerThread)
383 : {
384 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
385 : }
386 : }
387 :
388 1 : void GstGenericPlayer::setPlaybackRate(double rate)
389 : {
390 1 : if (m_workerThread)
391 : {
392 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
393 : }
394 : }
395 :
396 11 : bool GstGenericPlayer::getPosition(std::int64_t &position)
397 : {
398 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
399 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
400 11 : position = getPosition(m_context.pipeline);
401 11 : if (position == -1)
402 : {
403 3 : return false;
404 : }
405 :
406 8 : return true;
407 : }
408 :
/**
 * @brief Returns the sink element for the given media source type.
 *
 * Reads the playbin's "audio-sink"/"video-sink"/"text-sink" property. For
 * audio and video, if the returned sink is an auto-sink wrapper, the actual
 * child sink is returned instead. The caller owns a reference on the
 * returned element and must unref it.
 *
 * @param mediaSourceType : AUDIO, VIDEO or SUBTITLE; others are rejected.
 * @retval The sink element (referenced), or null if unavailable.
 */
GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
{
    const char *kSinkName{nullptr};
    GstElement *sink{nullptr};
    switch (mediaSourceType)
    {
    case MediaSourceType::AUDIO:
        kSinkName = "audio-sink";
        break;
    case MediaSourceType::VIDEO:
        kSinkName = "video-sink";
        break;
    case MediaSourceType::SUBTITLE:
        kSinkName = "text-sink";
        break;
    default:
        break;
    }
    if (!kSinkName)
    {
        RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
    }
    else
    {
        if (m_context.pipeline == nullptr)
        {
            RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
        }
        else
        {
            RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
        }
        // gObjectGet returns the sink with an added reference (or null).
        m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
        if (sink && firebolt::rialto::MediaSourceType::SUBTITLE != mediaSourceType)
        {
            GstElement *autoSink{sink};
            if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
                autoSink = getSinkChildIfAutoVideoSink(sink);
            else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
                autoSink = getSinkChildIfAutoAudioSink(sink);

            // Is this an auto-sink?...
            if (autoSink != sink)
            {
                // Swap the reference: drop the wrapper's, keep the child's.
                m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));

                // increase the reference count of the auto sink
                sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
            }
        }
    }
    return sink;
}
462 :
/**
 * @brief Records that the given source type has completed its flush.
 */
void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
{
    m_flushWatcher->setFlushed(mediaSourceType);
}
467 :
/**
 * @brief Collects the current position and volume and forwards them to the
 * player client.
 *
 * NOTE(review): m_gstPlayerClient is dereferenced without a null check —
 * presumably guaranteed non-null by construction; confirm with callers.
 */
void GstGenericPlayer::notifyPlaybackInfo()
{
    PlaybackInfo info;
    getPosition(info.currentPosition);
    getVolume(info.volume);
    m_gstPlayerClient->notifyPlaybackInfo(info);
}
475 :
/**
 * @brief Finds the decoder element for the given media type in the pipeline.
 *
 * Recursively iterates all elements in the pipeline bin and returns the
 * first one whose factory is classified as a decoder for the requested
 * media (audio/video). The returned element carries an extra reference the
 * caller must release.
 *
 * @retval The decoder element (referenced), or null when not found.
 */
GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                // Build the classification mask: decoder + requested media type.
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found it: clean up iterator state and hand back a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed while iterating; restart the iteration.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find decoder");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
531 :
/**
 * @brief Finds the parser element for the given media type in the pipeline.
 *
 * Mirrors getDecoder() but matches GST_ELEMENT_FACTORY_TYPE_PARSER. The
 * returned element carries an extra reference the caller must release.
 *
 * @retval The parser element (referenced), or null when not found.
 */
GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
{
    GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
    GValue item = G_VALUE_INIT;
    gboolean done = FALSE;

    while (!done)
    {
        switch (m_gstWrapper->gstIteratorNext(it, &item))
        {
        case GST_ITERATOR_OK:
        {
            GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
            GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);

            if (factory)
            {
                // Build the classification mask: parser + requested media type.
                GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
                if (mediaSourceType == MediaSourceType::AUDIO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
                }
                else if (mediaSourceType == MediaSourceType::VIDEO)
                {
                    type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
                }

                if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
                {
                    // Found it: clean up iterator state and hand back a new ref.
                    m_glibWrapper->gValueUnset(&item);
                    m_gstWrapper->gstIteratorFree(it);
                    return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
                }
            }

            m_glibWrapper->gValueUnset(&item);
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Pipeline changed while iterating; restart the iteration.
            m_gstWrapper->gstIteratorResync(it);
            break;
        case GST_ITERATOR_ERROR:
        case GST_ITERATOR_DONE:
            done = TRUE;
            break;
        }
    }

    RIALTO_SERVER_LOG_WARN("Could not find parser");

    m_glibWrapper->gValueUnset(&item);
    m_gstWrapper->gstIteratorFree(it);

    return nullptr;
}
587 :
588 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
589 7 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
590 : {
591 7 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
592 7 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
593 7 : if (kSource)
594 : {
595 6 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
596 : audioAttributes =
597 18 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
598 6 : audioConfig.numberOfChannels, audioConfig.sampleRate,
599 : 0, // used only in one of logs in rdk_gstreamer_utils, no
600 : // need to set this param.
601 : 0, // used only in one of logs in rdk_gstreamer_utils, no
602 : // need to set this param.
603 6 : audioConfig.codecSpecificConfig.data(),
604 : static_cast<std::uint32_t>(
605 6 : audioConfig.codecSpecificConfig.size())};
606 6 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
607 : {
608 4 : audioAttributes->m_codecParam = "mp4a";
609 : }
610 2 : else if (source->getMimeType() == "audio/x-eac3")
611 : {
612 1 : audioAttributes->m_codecParam = "ec-3";
613 : }
614 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
615 : {
616 1 : audioAttributes->m_codecParam = "lpcm";
617 : }
618 6 : }
619 : else
620 : {
621 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
622 : }
623 :
624 7 : return audioAttributes;
625 : }
626 :
627 2 : void GstGenericPlayer::configAudioCap(firebolt::rialto::wrappers::AudioAttributesPrivate *pAttrib, bool *audioaac,
628 : bool svpenabled, GstCaps **appsrcCaps)
629 : {
630 : // this function comes from rdk_gstreamer_utils
631 2 : if (!pAttrib || !audioaac || !appsrcCaps)
632 : {
633 0 : RIALTO_SERVER_LOG_ERROR("configAudioCap: invalid null parameter");
634 0 : return;
635 : }
636 : gchar *capsString;
637 2 : RIALTO_SERVER_LOG_DEBUG("Config audio codec %s sampling rate %d channel %d alignment %d",
638 : pAttrib->m_codecParam.c_str(), pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
639 : pAttrib->m_blockAlignment);
640 6 : if (pAttrib->m_codecParam.compare(0, 4, std::string("mp4a")) == 0)
641 : {
642 2 : RIALTO_SERVER_LOG_DEBUG("Using AAC");
643 2 : capsString = m_glibWrapper->gStrdupPrintf("audio/mpeg, mpegversion=4, enable-svp=(string)%s",
644 : svpenabled ? "true" : "false");
645 2 : *audioaac = true;
646 : }
647 : else
648 : {
649 0 : RIALTO_SERVER_LOG_DEBUG("Using EAC3");
650 0 : capsString = m_glibWrapper->gStrdupPrintf("audio/x-eac3, framed=(boolean)true, rate=(int)%u, channels=(int)%u, "
651 : "alignment=(string)frame, enable-svp=(string)%s",
652 : pAttrib->m_samplesPerSecond, pAttrib->m_numberOfChannels,
653 : svpenabled ? "true" : "false");
654 0 : *audioaac = false;
655 : }
656 2 : *appsrcCaps = m_gstWrapper->gstCapsFromString(capsString);
657 2 : m_glibWrapper->gFree(capsString);
658 : }
659 :
/**
 * @brief Halts audio playback for an on-the-fly codec switch.
 *
 * Ported from rdk_gstreamer_utils: moves the audio playsink bin to READY
 * and the audio decode bin to PAUSED, waiting synchronously for each state
 * change to complete.
 */
void GstGenericPlayer::haltAudioPlayback()
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("haltAudioPlayback: audio playsink bin or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};

    // Transition Playsink to Ready
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioPlaysinkBin, GST_STATE_READY))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioPlaysinkBin to READY");
        return;
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    // NOTE(review): the debug log is only emitted when the bin reports PAUSED,
    // even though READY was requested — kept as in the rdk_gstreamer_utils
    // original; confirm whether the condition is intentional.
    if (currentState == GST_STATE_PAUSED)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioPlaySinkBin State = %d", currentState);
    // Transition Decodebin to Paused
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecodeBin, GST_STATE_PAUSED))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set AudioDecodeBin to PAUSED");
        return;
    }
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    if (currentState == GST_STATE_PAUSED)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current DecodeBin State = %d", currentState);
}
693 :
/**
 * @brief Resumes audio playback after an on-the-fly codec switch.
 *
 * Ported from rdk_gstreamer_utils: re-synchronises the audio playsink bin
 * and decode bin with their parents' state, blocking on each state query.
 */
void GstGenericPlayer::resumeAudioPlayback()
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioPlaysinkBin || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("resumeAudioPlayback: audio playsink bin or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioPlaysinkBin);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioPlaysinkBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> AudioPlaysinkbin State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioDecodeBin);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecodeBin, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> Decodebin State = %d Pending = %d", currentState, pending);
}
712 :
/**
 * @brief Rewires the audio branch for the first AC3 -> AAC codec switch.
 *
 * Ported from rdk_gstreamer_utils. Unlinks typefind from its downstream
 * peer, inserts a new aacparse -> queue -> avdec_aac chain into the decode
 * bin, relinks the new decoder to the original downstream pad, forces the
 * new caps on typefind and syncs all inserted elements with the parent
 * state. Pad references taken along the way are released before returning.
 *
 * @param newAudioCaps : The AAC caps to force on the typefind element.
 */
void GstGenericPlayer::firstTimeSwitchFromAC3toAAC(GstCaps *newAudioCaps)
{
    // this function comes from rdk_gstreamer_utils
    if (!m_context.playbackGroup.m_curAudioTypefind || !m_context.playbackGroup.m_curAudioDecodeBin)
    {
        RIALTO_SERVER_LOG_ERROR("firstTimeSwitchFromAC3toAAC: audio typefind or decode bin is null");
        return;
    }
    GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
    GstPad *pTypfdSrcPad = NULL;
    GstPad *pTypfdSrcPeerPad = NULL;
    GstPad *pNewAudioDecoderSrcPad = NULL;
    GstElement *newAudioParse = NULL;
    GstElement *newAudioDecoder = NULL;
    GstElement *newQueue = NULL;
    gboolean linkRet = false;

    /* Get the SinkPad of ASink - pTypfdSrcPeerPad */
    if ((pTypfdSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioTypefind, "src")) !=
        NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind SrcPad = %p", pTypfdSrcPad);
    if ((pTypfdSrcPeerPad = m_gstWrapper->gstPadGetPeer(pTypfdSrcPad)) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> Current Typefind Src Downstream Element Pad = %p", pTypfdSrcPeerPad);
    // AudioDecoder Downstream Unlink
    if (m_gstWrapper->gstPadUnlink(pTypfdSrcPad, pTypfdSrcPeerPad) == FALSE)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Downstream Unlink Failed");
    // Create the replacement AAC chain elements.
    newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
    newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
    newQueue = m_gstWrapper->gstElementFactoryMake("queue", "aqueue");
    // Add new Decoder to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
    }
    // Add new Parser to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
    }
    // Add new Queue to Decodebin
    if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newQueue) == TRUE)
    {
        RIALTO_SERVER_LOG_DEBUG("OTF -> Added New queue = %p", newQueue);
    }
    if ((pNewAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) != NULL) // Unref the Pad
        RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", pNewAudioDecoderSrcPad);
    // Connect decoder to ASINK
    if (m_gstWrapper->gstPadLink(pNewAudioDecoderSrcPad, pTypfdSrcPeerPad) != GST_PAD_LINK_OK)
        RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
    // Link parse -> queue -> decoder inside the decode bin.
    linkRet = m_gstWrapper->gstElementLink(newAudioParse, newQueue) &&
              m_gstWrapper->gstElementLink(newQueue, newAudioDecoder);
    if (!linkRet)
        RIALTO_SERVER_LOG_DEBUG("OTF -> Downstream Link Failed for typefind, parser, decoder");
    /* Force Caps */
    RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
    if (GST_STATE_CHANGE_FAILURE ==
        m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioTypefind, GST_STATE_READY))
    {
        RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY");
        // Release the pad references taken above before bailing out.
        m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
        m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
        m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
        return;
    }
    m_glibWrapper->gObjectSet(G_OBJECT(m_context.playbackGroup.m_curAudioTypefind), "force-caps", newAudioCaps, NULL);
    m_gstWrapper->gstElementSyncStateWithParent(m_context.playbackGroup.m_curAudioTypefind);
    m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioTypefind, &currentState, &pending,
                                     GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
    RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
    m_context.playbackGroup.m_linkTypefindParser = true;
    /* Update the state */
    m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
    m_gstWrapper->gstElementGetState(newAudioDecoder, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newQueue);
    m_gstWrapper->gstElementGetState(newQueue, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New queue State = %d Pending = %d", currentState, pending);
    m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
    m_gstWrapper->gstElementGetState(newAudioParse, &currentState, &pending, GST_CLOCK_TIME_NONE);
    RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
    // Release the pad references taken above.
    m_gstWrapper->gstObjectUnref(pTypfdSrcPad);
    m_gstWrapper->gstObjectUnref(pTypfdSrcPeerPad);
    m_gstWrapper->gstObjectUnref(pNewAudioDecoderSrcPad);
    return;
}
799 :
800 1 : bool GstGenericPlayer::switchAudioCodec(bool isAudioAAC, GstCaps *newAudioCaps)
801 : { // this function comes from rdk_gstreamer_utils
802 1 : bool ret = false;
803 1 : RIALTO_SERVER_LOG_DEBUG("Current Audio Codec AAC = %d Same as Incoming audio Codec AAC = %d",
804 : m_context.playbackGroup.m_isAudioAAC, isAudioAAC);
805 1 : if (m_context.playbackGroup.m_isAudioAAC == isAudioAAC)
806 : {
807 0 : return ret;
808 : }
809 1 : if ((m_context.playbackGroup.m_curAudioDecoder == NULL) && (!(m_context.playbackGroup.m_isAudioAAC)) && (isAudioAAC))
810 : {
811 1 : firstTimeSwitchFromAC3toAAC(newAudioCaps);
812 1 : m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
813 1 : return true;
814 : }
815 0 : if (!m_context.playbackGroup.m_curAudioDecoder || !m_context.playbackGroup.m_curAudioParse ||
816 0 : !m_context.playbackGroup.m_curAudioDecodeBin)
817 : {
818 0 : RIALTO_SERVER_LOG_ERROR("switchAudioCodec: audio decoder, parser or decode bin is null");
819 0 : return false;
820 : }
821 0 : GstElement *newAudioParse = NULL;
822 0 : GstElement *newAudioDecoder = NULL;
823 0 : GstPad *newAudioParseSrcPad = NULL;
824 0 : GstPad *newAudioParseSinkPad = NULL;
825 0 : GstPad *newAudioDecoderSrcPad = NULL;
826 0 : GstPad *newAudioDecoderSinkPad = NULL;
827 0 : GstPad *audioDecSrcPad = NULL;
828 0 : GstPad *audioDecSinkPad = NULL;
829 0 : GstPad *audioDecSrcPeerPad = NULL;
830 0 : GstPad *audioDecSinkPeerPad = NULL;
831 0 : GstPad *audioParseSrcPad = NULL;
832 0 : GstPad *audioParseSinkPad = NULL;
833 0 : GstPad *audioParseSrcPeerPad = NULL;
834 0 : GstPad *audioParseSinkPeerPad = NULL;
835 0 : GstState currentState{GST_STATE_VOID_PENDING}, pending{GST_STATE_VOID_PENDING};
836 :
837 : // Get AudioDecoder Src Pads
838 0 : if ((audioDecSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "src")) !=
839 : NULL) // Unref the Pad
840 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Pad = %p", audioDecSrcPad);
841 : // Get AudioDecoder Sink Pads
842 0 : if ((audioDecSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioDecoder, "sink")) !=
843 : NULL) // Unref the Pad
844 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Pad = %p", audioDecSinkPad);
845 : // Get AudioDecoder Src Peer i.e. Downstream Element Pad
846 0 : if ((audioDecSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSrcPad)) != NULL) // Unref the Pad
847 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Src Downstream Element Pad = %p", audioDecSrcPeerPad);
848 : // Get AudioDecoder Sink Peer i.e. Upstream Element Pad
849 0 : if ((audioDecSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioDecSinkPad)) != NULL) // Unref the Pad
850 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder Sink Upstream Element Pad = %p", audioDecSinkPeerPad);
851 : // Get AudioParser Src Pads
852 0 : if ((audioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "src")) !=
853 : NULL) // Unref the Pad
854 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Pad = %p", audioParseSrcPad);
855 : // Get AudioParser Sink Pads
856 0 : if ((audioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(m_context.playbackGroup.m_curAudioParse, "sink")) !=
857 : NULL) // Unref the Pad
858 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Pad = %p", audioParseSinkPad);
859 : // Get AudioParser Src Peer i.e. Downstream Element Pad
860 0 : if ((audioParseSrcPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSrcPad)) != NULL) // Unref the Peer Pad
861 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Src Downstream Element Pad = %p", audioParseSrcPeerPad);
862 : // Get AudioParser Sink Peer i.e. Upstream Element Pad
863 0 : if ((audioParseSinkPeerPad = m_gstWrapper->gstPadGetPeer(audioParseSinkPad)) != NULL) // Unref the Peer Pad
864 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser Sink Upstream Element Pad = %p", audioParseSinkPeerPad);
865 : // AudioDecoder Downstream Unlink
866 0 : if (m_gstWrapper->gstPadUnlink(audioDecSrcPad, audioDecSrcPeerPad) == FALSE)
867 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Downstream Unlink Failed");
868 : // AudioDecoder Upstream Unlink
869 0 : if (m_gstWrapper->gstPadUnlink(audioDecSinkPeerPad, audioDecSinkPad) == FALSE)
870 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> AudioDecoder Upstream Unlink Failed");
871 : // AudioParser Downstream Unlink
872 0 : if (m_gstWrapper->gstPadUnlink(audioParseSrcPad, audioParseSrcPeerPad) == FALSE)
873 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Downstream Unlink Failed");
874 : // AudioParser Upstream Unlink
875 0 : if (m_gstWrapper->gstPadUnlink(audioParseSinkPeerPad, audioParseSinkPad) == FALSE)
876 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> AudioParser Upstream Unlink Failed");
877 : // Current Audio Decoder NULL
878 0 : if (GST_STATE_CHANGE_FAILURE ==
879 0 : m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioDecoder, GST_STATE_NULL))
880 : {
881 0 : RIALTO_SERVER_LOG_WARN("Failed to set AudioDecoder to NULL");
882 : }
883 0 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioDecoder, ¤tState, &pending,
884 : GST_CLOCK_TIME_NONE);
885 0 : if (currentState == GST_STATE_NULL)
886 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioDecoder State = %d", currentState);
887 : // Current Audio Parser NULL
888 0 : if (GST_STATE_CHANGE_FAILURE ==
889 0 : m_gstWrapper->gstElementSetState(m_context.playbackGroup.m_curAudioParse, GST_STATE_NULL))
890 : {
891 0 : RIALTO_SERVER_LOG_WARN("Failed to set AudioParser to NULL");
892 : }
893 0 : m_gstWrapper->gstElementGetState(m_context.playbackGroup.m_curAudioParse, ¤tState, &pending,
894 : GST_CLOCK_TIME_NONE);
895 0 : if (currentState == GST_STATE_NULL)
896 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Current AudioParser State = %d", currentState);
897 : // Remove Audio Decoder From Decodebin
898 0 : if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
899 0 : m_context.playbackGroup.m_curAudioDecoder) == TRUE)
900 : {
901 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioDecoder = %p", m_context.playbackGroup.m_curAudioDecoder);
902 0 : m_context.playbackGroup.m_curAudioDecoder = NULL;
903 : }
904 : // Remove Audio Parser From Decodebin
905 0 : if (m_gstWrapper->gstBinRemove(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()),
906 0 : m_context.playbackGroup.m_curAudioParse) == TRUE)
907 : {
908 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Removed AudioParser = %p", m_context.playbackGroup.m_curAudioParse);
909 0 : m_context.playbackGroup.m_curAudioParse = NULL;
910 : }
911 : // Create new Audio Decoder and Parser. The inverse of the current
912 0 : if (m_context.playbackGroup.m_isAudioAAC)
913 : {
914 0 : newAudioParse = m_gstWrapper->gstElementFactoryMake("ac3parse", "ac3parse");
915 0 : newAudioDecoder = m_gstWrapper->gstElementFactoryMake("identity", "fake_aud_ac3dec");
916 : }
917 : else
918 : {
919 0 : newAudioParse = m_gstWrapper->gstElementFactoryMake("aacparse", "aacparse");
920 0 : newAudioDecoder = m_gstWrapper->gstElementFactoryMake("avdec_aac", "avdec_aac");
921 : }
922 : {
923 0 : GstPadLinkReturn gstPadLinkRet = GST_PAD_LINK_OK;
924 0 : GstElement *audioParseUpstreamEl = NULL;
925 : // Add new Decoder to Decodebin
926 0 : if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioDecoder) == TRUE)
927 : {
928 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioDecoder = %p", newAudioDecoder);
929 : }
930 : // Add new Parser to Decodebin
931 0 : if (m_gstWrapper->gstBinAdd(GST_BIN(m_context.playbackGroup.m_curAudioDecodeBin.load()), newAudioParse) == TRUE)
932 : {
933 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Added New AudioParser = %p", newAudioParse);
934 : }
935 0 : if ((newAudioDecoderSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "src")) !=
936 : NULL) // Unref the Pad
937 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Src Pad = %p", newAudioDecoderSrcPad);
938 0 : if ((newAudioDecoderSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioDecoder, "sink")) !=
939 : NULL) // Unref the Pad
940 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Sink Pad = %p", newAudioDecoderSinkPad);
941 : // Link New Decoder to Downstream followed by UpStream
942 0 : if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioDecoderSrcPad, audioDecSrcPeerPad)) != GST_PAD_LINK_OK)
943 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Downstream Link Failed");
944 0 : if ((gstPadLinkRet = m_gstWrapper->gstPadLink(audioDecSinkPeerPad, newAudioDecoderSinkPad)) != GST_PAD_LINK_OK)
945 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder Upstream Link Failed");
946 0 : if ((newAudioParseSrcPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "src")) != NULL) // Unref the Pad
947 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Src Pad = %p", newAudioParseSrcPad);
948 0 : if ((newAudioParseSinkPad = m_gstWrapper->gstElementGetStaticPad(newAudioParse, "sink")) != NULL) // Unref the Pad
949 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Sink Pad = %p", newAudioParseSinkPad);
950 : // Link New Parser to Downstream followed by UpStream
951 0 : if ((gstPadLinkRet = m_gstWrapper->gstPadLink(newAudioParseSrcPad, audioParseSrcPeerPad)) != GST_PAD_LINK_OK)
952 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser Downstream Link Failed %d", gstPadLinkRet);
953 0 : if ((audioParseUpstreamEl = GST_ELEMENT_CAST(m_gstWrapper->gstPadGetParent(audioParseSinkPeerPad))) ==
954 0 : m_context.playbackGroup.m_curAudioTypefind)
955 : {
956 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Setting to READY");
957 0 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(audioParseUpstreamEl, GST_STATE_READY))
958 : {
959 0 : RIALTO_SERVER_LOG_WARN("Failed to set Typefind to READY in switchAudioCodec");
960 : }
961 0 : m_glibWrapper->gObjectSet(G_OBJECT(audioParseUpstreamEl), "force-caps", newAudioCaps, NULL);
962 0 : m_gstWrapper->gstElementSyncStateWithParent(audioParseUpstreamEl);
963 0 : m_gstWrapper->gstElementGetState(audioParseUpstreamEl, ¤tState, &pending, GST_CLOCK_TIME_NONE);
964 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New Typefind State = %d Pending = %d", currentState, pending);
965 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> Typefind Syncing with Parent");
966 0 : m_context.playbackGroup.m_linkTypefindParser = true;
967 0 : m_gstWrapper->gstObjectUnref(audioParseUpstreamEl);
968 : }
969 0 : m_gstWrapper->gstObjectUnref(newAudioDecoderSrcPad);
970 0 : m_gstWrapper->gstObjectUnref(newAudioDecoderSinkPad);
971 0 : m_gstWrapper->gstObjectUnref(newAudioParseSrcPad);
972 0 : m_gstWrapper->gstObjectUnref(newAudioParseSinkPad);
973 : }
974 0 : m_gstWrapper->gstObjectUnref(audioParseSinkPeerPad);
975 0 : m_gstWrapper->gstObjectUnref(audioParseSrcPeerPad);
976 0 : m_gstWrapper->gstObjectUnref(audioParseSinkPad);
977 0 : m_gstWrapper->gstObjectUnref(audioParseSrcPad);
978 0 : m_gstWrapper->gstObjectUnref(audioDecSinkPeerPad);
979 0 : m_gstWrapper->gstObjectUnref(audioDecSrcPeerPad);
980 0 : m_gstWrapper->gstObjectUnref(audioDecSinkPad);
981 0 : m_gstWrapper->gstObjectUnref(audioDecSrcPad);
982 0 : m_gstWrapper->gstElementSyncStateWithParent(newAudioDecoder);
983 0 : m_gstWrapper->gstElementGetState(newAudioDecoder, ¤tState, &pending, GST_CLOCK_TIME_NONE);
984 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioDecoder State = %d Pending = %d", currentState, pending);
985 0 : m_gstWrapper->gstElementSyncStateWithParent(newAudioParse);
986 0 : m_gstWrapper->gstElementGetState(newAudioParse, ¤tState, &pending, GST_CLOCK_TIME_NONE);
987 0 : RIALTO_SERVER_LOG_DEBUG("OTF -> New AudioParser State = %d Pending = %d", currentState, pending);
988 0 : m_context.playbackGroup.m_isAudioAAC = isAudioAAC;
989 0 : return true;
990 : }
991 :
992 2 : bool GstGenericPlayer::performAudioTrackCodecChannelSwitch(const void *pSampleAttr,
993 : firebolt::rialto::wrappers::AudioAttributesPrivate *pAudioAttr,
994 : uint32_t *pStatus, unsigned int *pui32Delay,
995 : long long *pAudioChangeTargetPts, // NOLINT(runtime/int)
996 : const long long *pcurrentDispPts, // NOLINT(runtime/int)
997 : unsigned int *audioChangeStage, GstCaps **appsrcCaps,
998 : bool *audioaac, bool svpenabled, GstElement *aSrc, bool *ret)
999 : {
1000 : // this function comes from rdk_gstreamer_utils
1001 2 : if (!pStatus || !pui32Delay || !pAudioChangeTargetPts || !pcurrentDispPts || !audioChangeStage || !appsrcCaps ||
1002 2 : !audioaac || !aSrc || !ret)
1003 : {
1004 0 : RIALTO_SERVER_LOG_ERROR("performAudioTrackCodecChannelSwitch: invalid null parameter");
1005 0 : return false;
1006 : }
1007 :
1008 2 : constexpr uint32_t kOk = 0;
1009 2 : constexpr uint32_t kWaitWhileIdling = 100;
1010 2 : constexpr int kAudioChangeGapThresholdMS = 40;
1011 2 : constexpr unsigned int kAudchgAlign = 3;
1012 :
1013 : struct timespec ts, now;
1014 : unsigned int reconfigDelayMs;
1015 2 : clock_gettime(CLOCK_MONOTONIC, &ts);
1016 2 : if (*pStatus != kOk || pSampleAttr == nullptr)
1017 : {
1018 0 : RIALTO_SERVER_LOG_DEBUG("No audio data ready yet");
1019 0 : *pui32Delay = kWaitWhileIdling;
1020 0 : *ret = false;
1021 0 : return true;
1022 : }
1023 2 : RIALTO_SERVER_LOG_DEBUG("Received first audio packet after a flush, PTS");
1024 2 : if (pAudioAttr)
1025 : {
1026 2 : const char *pCodecStr = pAudioAttr->m_codecParam.c_str();
1027 2 : const char *pCodecAcc = strstr(pCodecStr, "mp4a");
1028 2 : bool isAudioAAC = (pCodecAcc) ? true : false;
1029 2 : bool isCodecSwitch = false;
1030 2 : RIALTO_SERVER_LOG_DEBUG("Audio Attribute format %s channel %d samp %d, bitrate %d blockAlignment %d", pCodecStr,
1031 : pAudioAttr->m_numberOfChannels, pAudioAttr->m_samplesPerSecond, pAudioAttr->m_bitrate,
1032 : pAudioAttr->m_blockAlignment);
1033 2 : *pAudioChangeTargetPts = *pcurrentDispPts;
1034 2 : *audioChangeStage = kAudchgAlign;
1035 2 : if (*appsrcCaps)
1036 : {
1037 2 : m_gstWrapper->gstCapsUnref(*appsrcCaps);
1038 2 : *appsrcCaps = NULL;
1039 : }
1040 2 : if (isAudioAAC != *audioaac)
1041 1 : isCodecSwitch = true;
1042 2 : configAudioCap(pAudioAttr, audioaac, svpenabled, appsrcCaps);
1043 : {
1044 2 : gboolean sendRet = FALSE;
1045 2 : GstEvent *flushStart = NULL;
1046 2 : GstEvent *flushStop = NULL;
1047 2 : flushStart = m_gstWrapper->gstEventNewFlushStart();
1048 2 : sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStart);
1049 2 : if (!sendRet)
1050 0 : RIALTO_SERVER_LOG_DEBUG("failed to send flush-start event");
1051 2 : flushStop = m_gstWrapper->gstEventNewFlushStop(TRUE);
1052 2 : sendRet = m_gstWrapper->gstElementSendEvent(aSrc, flushStop);
1053 2 : if (!sendRet)
1054 0 : RIALTO_SERVER_LOG_DEBUG("failed to send flush-stop event");
1055 : }
1056 2 : if (!isCodecSwitch)
1057 : {
1058 1 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
1059 : }
1060 : else
1061 : {
1062 1 : RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH mAudioAAC = %d", *audioaac);
1063 1 : haltAudioPlayback();
1064 1 : if (switchAudioCodec(*audioaac, *appsrcCaps) == false)
1065 : {
1066 0 : RIALTO_SERVER_LOG_DEBUG("CODEC SWITCH FAILED switchAudioCodec mAudioAAC = %d", *audioaac);
1067 : }
1068 1 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(aSrc), *appsrcCaps);
1069 1 : resumeAudioPlayback();
1070 : }
1071 2 : clock_gettime(CLOCK_MONOTONIC, &now);
1072 2 : reconfigDelayMs = now.tv_nsec > ts.tv_nsec ? (now.tv_nsec - ts.tv_nsec) / 1000000
1073 0 : : (1000 - (ts.tv_nsec - now.tv_nsec) / 1000000);
1074 2 : (*pAudioChangeTargetPts) += (reconfigDelayMs + kAudioChangeGapThresholdMS);
1075 : }
1076 : else
1077 : {
1078 0 : RIALTO_SERVER_LOG_DEBUG("first audio after change no attribute drop!");
1079 0 : *pui32Delay = 0;
1080 0 : *ret = false;
1081 0 : return true;
1082 : }
1083 2 : *ret = true;
1084 2 : return true;
1085 : }
1086 :
1087 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
1088 : {
1089 1 : if (!m_workerThread)
1090 0 : return false;
1091 :
1092 2 : m_workerThread->enqueueTask(
1093 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
1094 1 : return true;
1095 : }
1096 :
1097 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
1098 : {
1099 5 : bool returnValue{false};
1100 5 : GstElement *sink{getSink(mediaSourceType)};
1101 5 : if (sink)
1102 : {
1103 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1104 : {
1105 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
1106 2 : returnValue = true;
1107 : }
1108 : else
1109 : {
1110 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
1111 : }
1112 3 : m_gstWrapper->gstObjectUnref(sink);
1113 : }
1114 : else
1115 : {
1116 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
1117 : }
1118 :
1119 5 : return returnValue;
1120 : }
1121 :
1122 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
1123 : {
1124 5 : bool returnValue{false};
1125 5 : GstElement *sink{getSink(mediaSourceType)};
1126 5 : if (sink)
1127 : {
1128 3 : GstStructure *stats{nullptr};
1129 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
1130 3 : if (!stats)
1131 : {
1132 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
1133 : }
1134 : else
1135 : {
1136 : guint64 renderedFramesTmp;
1137 : guint64 droppedFramesTmp;
1138 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
1139 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
1140 : {
1141 1 : renderedFrames = renderedFramesTmp;
1142 1 : droppedFrames = droppedFramesTmp;
1143 1 : returnValue = true;
1144 : }
1145 : else
1146 : {
1147 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
1148 : GST_ELEMENT_NAME(sink));
1149 : }
1150 2 : m_gstWrapper->gstStructureFree(stats);
1151 : }
1152 3 : m_gstWrapper->gstObjectUnref(sink);
1153 : }
1154 : else
1155 : {
1156 2 : RIALTO_SERVER_LOG_ERROR("Failed to get stats, sink is NULL");
1157 : }
1158 :
1159 5 : return returnValue;
1160 : }
1161 :
/**
 * @brief Creates a GstBuffer from a media segment, attaching Rialto
 *        protection metadata when the segment is encrypted.
 *
 * The payload is copied into a newly allocated buffer. For encrypted
 * segments, key id, IV and serialized subsamples are wrapped in GstBuffers
 * and handed to the protection metadata; on metadata-attach failure those
 * auxiliary buffers are unreffed here to avoid leaks.
 *
 * @param mediaSegment segment supplying payload, timing and DRM parameters.
 * @return a new GstBuffer (ownership passes to the caller).
 */
GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
{
    GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
    m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());

    if (mediaSegment.isEncrypted())
    {
        // Key id and IV are copied into their own buffers; ownership moves to
        // the protection metadata on success.
        GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
        m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());

        GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
        m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
                                    mediaSegment.getInitVector().size());
        GstBuffer *subsamples{nullptr};
        if (!mediaSegment.getSubSamples().empty())
        {
            // Serialize subsamples as (uint16 clear bytes, uint32 encrypted
            // bytes) pairs in big-endian order, as expected downstream.
            auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
            guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
            GstByteWriter writer;
            m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);

            for (const auto &subSample : mediaSegment.getSubSamples())
            {
                m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
                m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
            }
            // The wrapped buffer takes ownership of subsamplesRaw.
            subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
        }

        uint32_t crypt = 0;
        uint32_t skip = 0;
        bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);

        GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
                                        static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
                                        mediaSegment.getInitWithLast15(),
                                        keyId,
                                        initVector,
                                        subsamples,
                                        mediaSegment.getCipherMode(),
                                        crypt,
                                        skip,
                                        encryptionPatternSet,
                                        m_context.decryptionService};

        if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
        {
            // Metadata did not take ownership: release the auxiliary buffers.
            RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
            if (keyId)
            {
                m_gstWrapper->gstBufferUnref(keyId);
            }
            if (initVector)
            {
                m_gstWrapper->gstBufferUnref(initVector);
            }
            if (subsamples)
            {
                m_gstWrapper->gstBufferUnref(subsamples);
            }
        }
    }

    GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
    GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
    return gstBuffer;
}
1229 :
1230 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
1231 : {
1232 4 : auto elem = m_context.streamInfo.find(mediaSource);
1233 4 : if (elem != m_context.streamInfo.end())
1234 : {
1235 2 : StreamInfo &streamInfo = elem->second;
1236 2 : streamInfo.isNeedDataPending = false;
1237 :
1238 : // Send new NeedMediaData if we still need it
1239 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
1240 : {
1241 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
1242 : }
1243 : }
1244 : else
1245 : {
1246 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
1247 : }
1248 4 : }
1249 :
/**
 * @brief Pushes all queued buffers of the given media type into its appsrc.
 *
 * For subtitles the text-track position is set first; for other types an
 * initial segment/sample is pushed if required. After pushing, the buffered
 * network-state notification is emitted once all streams have pushed data,
 * any underflow for this type is cancelled, and a pending EOS is applied.
 *
 * @param mediaType the media source type whose queued buffers are attached.
 */
void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
{
    auto elem = m_context.streamInfo.find(mediaType);
    if (elem != m_context.streamInfo.end())
    {
        StreamInfo &streamInfo = elem->second;
        if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
        {
            // Nothing queued, or the source has not requested data.
            return;
        }

        if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
        {
            setTextTrackPositionIfRequired(streamInfo.appSrc);
        }
        else
        {
            pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
        }
        if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
        {
            // This needs to be done before gstAppSrcPushBuffer() is
            // called because it can free the memory
            m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
        }

        // gstAppSrcPushBuffer takes ownership of each buffer.
        for (GstBuffer *buffer : streamInfo.buffers)
        {
            m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
        }
        streamInfo.buffers.clear();
        streamInfo.isDataPushed = true;

        // Emit BUFFERED exactly once, after every known stream has pushed
        // data (or immediately when this is the only stream).
        const bool kIsSingle = m_context.streamInfo.size() == 1;
        bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
                                                 [](const auto &entry) { return entry.second.isDataPushed; });

        if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
        {
            m_context.bufferedNotificationSent = true;
            m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
            RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
        }
        cancelUnderflow(mediaType);

        // Apply an end-of-stream that was deferred until data arrived.
        const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
        if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
        {
            setEos(mediaType);
        }
    }
}
1302 :
1303 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
1304 : {
1305 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
1306 7 : if (elem != m_context.streamInfo.end())
1307 : {
1308 6 : StreamInfo &streamInfo = elem->second;
1309 :
1310 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
1311 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1312 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1313 :
1314 6 : if (rate != kInvalidRate)
1315 : {
1316 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
1317 : }
1318 :
1319 6 : if (channels != kInvalidChannels)
1320 : {
1321 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
1322 : }
1323 :
1324 6 : setCodecData(newCaps, codecData);
1325 :
1326 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1327 : {
1328 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1329 : }
1330 :
1331 6 : m_gstWrapper->gstCapsUnref(newCaps);
1332 6 : m_gstWrapper->gstCapsUnref(currentCaps);
1333 : }
1334 7 : }
1335 :
1336 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
1337 : const std::shared_ptr<CodecData> &codecData)
1338 : {
1339 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
1340 8 : if (elem != m_context.streamInfo.end())
1341 : {
1342 7 : StreamInfo &streamInfo = elem->second;
1343 :
1344 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
1345 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
1346 :
1347 7 : if (width > 0)
1348 : {
1349 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
1350 : }
1351 :
1352 7 : if (height > 0)
1353 : {
1354 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
1355 : }
1356 :
1357 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
1358 : {
1359 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
1360 : frameRate.denominator, NULL);
1361 : }
1362 :
1363 7 : setCodecData(newCaps, codecData);
1364 :
1365 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
1366 : {
1367 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
1368 : }
1369 :
1370 7 : m_gstWrapper->gstCapsUnref(currentCaps);
1371 7 : m_gstWrapper->gstCapsUnref(newCaps);
1372 : }
1373 8 : }
1374 :
1375 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
1376 : {
1377 5 : if (clippingStart || clippingEnd)
1378 : {
1379 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
1380 : {
1381 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
1382 : clippingStart, clippingEnd);
1383 : }
1384 : else
1385 : {
1386 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
1387 : buffer, clippingStart, clippingEnd);
1388 : }
1389 : }
1390 5 : }
1391 :
1392 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
1393 : {
1394 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
1395 : {
1396 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
1397 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
1398 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
1399 7 : m_gstWrapper->gstBufferUnref(buf);
1400 7 : return true;
1401 : }
1402 6 : if (codecData && CodecDataType::STRING == codecData->type)
1403 : {
1404 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
1405 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
1406 2 : return true;
1407 : }
1408 4 : return false;
1409 : }
1410 :
/**
 * @brief Pushes initial segment samples into an appsrc when queued initial
 *        positions exist for the source.
 *
 * For each queued position a fresh GST_FORMAT_TIME segment is seeked to the
 * position/stop, tagged with the applied rate, and pushed as a bufferless
 * GstSample. On success the last segment becomes the source's current
 * position and the queue entry is removed.
 *
 * @param source  the appsrc element to push into.
 * @param typeStr human-readable media type name, used for logging only.
 */
void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
{
    auto initialPosition = m_context.initialPositions.find(source);
    if (m_context.initialPositions.end() == initialPosition)
    {
        // Sending initial sample not needed
        return;
    }
    // GstAppSrc does not replace segment, if it's the same as previous one.
    // It causes problems with position reporing in amlogic devices, so we need to push
    // two segments with different reset time value.
    pushAdditionalSegmentIfRequired(source);

    for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
    {
        // resetTime selects a flushing seek so downstream running time restarts.
        GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
        RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
        GstSegment *segment{m_gstWrapper->gstSegmentNew()};
        m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
        if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
                                            GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
        {
            // Abort on seek failure; the queued positions are dropped.
            RIALTO_SERVER_LOG_WARN("Segment seek failed.");
            m_gstWrapper->gstSegmentFree(segment);
            m_context.initialPositions.erase(initialPosition);
            return;
        }
        segment->applied_rate = appliedRate;
        RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
                              "], rate: %f, appliedRate %f, reset_time: %d\n",
                              typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
                              segment->rate, segment->applied_rate, resetTime);

        GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
        // We can't pass buffer in GstSample, because implementation of gst_app_src_push_sample
        // uses gst_buffer_copy, which loses RialtoProtectionMeta (that causes problems with EME
        // for first frame).
        GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
        m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
        m_gstWrapper->gstSampleUnref(sample);
        m_gstWrapper->gstCapsUnref(currentCaps);

        m_gstWrapper->gstSegmentFree(segment);
    }
    // Remember the last pushed segment as the source's current position.
    m_context.currentPosition[source] = initialPosition->second.back();
    m_context.initialPositions.erase(initialPosition);
    return;
}
1459 :
1460 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
1461 : {
1462 5 : auto currentPosition = m_context.currentPosition.find(source);
1463 5 : if (m_context.currentPosition.end() == currentPosition)
1464 : {
1465 4 : return;
1466 : }
1467 1 : auto initialPosition = m_context.initialPositions.find(source);
1468 1 : if (m_context.initialPositions.end() == initialPosition)
1469 : {
1470 0 : return;
1471 : }
1472 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1473 1 : currentPosition->second == initialPosition->second.back())
1474 : {
1475 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1476 1 : SegmentData additionalSegment = initialPosition->second.back();
1477 1 : additionalSegment.resetTime = false;
1478 1 : initialPosition->second.push_back(additionalSegment);
1479 : }
1480 : }
1481 :
1482 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1483 : {
1484 2 : auto initialPosition = m_context.initialPositions.find(source);
1485 2 : if (m_context.initialPositions.end() == initialPosition)
1486 : {
1487 : // Sending initial sample not needed
1488 1 : return;
1489 : }
1490 :
1491 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1492 : GST_TIME_ARGS(initialPosition->second.back().position));
1493 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1494 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1495 :
1496 1 : m_context.initialPositions.erase(initialPosition);
1497 : }
1498 :
// Re-attaches an audio source to an already-running pipeline (e.g. on audio
// track switch). Validates the source, then - if the new caps differ from the
// caps currently set on the appsrc - performs a codec/channel switch via the
// RDK GStreamer utils (or a local reimplementation for amlhalasink).
// Returns false when validation fails or the audio sink cannot be found.
bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
{
    if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
    {
        RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
        return false;
    }
    if (source->getMimeType().empty())
    {
        RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
        return false;
    }
    std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
    if (!audioAttributes)
    {
        RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
        return false;
    }

    long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
    GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
    GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
    GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);

    // A switch is only needed when the caps actually change (or none were set).
    if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
    {
        RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");

        GstElement *sink = getSink(MediaSourceType::AUDIO);
        if (!sink)
        {
            RIALTO_SERVER_LOG_ERROR("Failed to get audio sink");
            // Both caps refs must be dropped on this early-exit path.
            if (caps)
                m_gstWrapper->gstCapsUnref(caps);
            if (oldCaps)
                m_gstWrapper->gstCapsUnref(oldCaps);
            return false;
        }
        // Copy the name before releasing the sink reference.
        std::string sinkName = GST_ELEMENT_NAME(sink);
        m_gstWrapper->gstObjectUnref(sink);

        int sampleAttributes{
            0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
        std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
        unsigned int ui32Delay{0}; // output param
        long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
                                            // currentDispPts in rdk_gstreamer_utils function stub
        unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
        gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
        std::string oldCapsStr = std::string(oldCapsCStr);
        m_glibWrapper->gFree(oldCapsCStr);
        bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
        bool svpEnabled{true}; // assume always true
        bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub

        bool result = false;
        if (m_glibWrapper->gStrHasPrefix(sinkName.c_str(), "amlhalasink"))
        {
            // due to problems audio codec change in prerolling, temporarily moved the code from rdk gstreamer utils to
            // Rialto and applied fixes
            result = performAudioTrackCodecChannelSwitch(&sampleAttributes, &(*audioAttributes), &status, &ui32Delay,
                                                         &audioChangeTargetPts, &currentDispPts, &audioChangeStage,
                                                         &caps, &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
        }
        else
        {
            result = m_rdkGstreamerUtilsWrapper->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup,
                                                                                     &sampleAttributes,
                                                                                     &(*audioAttributes), &status,
                                                                                     &ui32Delay, &audioChangeTargetPts,
                                                                                     &currentDispPts, &audioChangeStage,
                                                                                     &caps, &audioAac, svpEnabled,
                                                                                     GST_ELEMENT(appSrc), &retVal);
        }

        // The helper reports failure both through its return value and through retVal.
        if (!result || !retVal)
        {
            RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
        }
    }
    else
    {
        RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
    }

    // NOTE(review): a failed channel switch still updates the timestamp and
    // returns true - presumably intentional best-effort behaviour; confirm.
    m_context.lastAudioSampleTimestamps = currentDispPts;
    if (caps)
        m_gstWrapper->gstCapsUnref(caps);
    if (oldCaps)
        m_gstWrapper->gstCapsUnref(oldCaps);

    return true;
}
1592 :
1593 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1594 : {
1595 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1596 : }
1597 :
1598 89 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1599 : {
1600 89 : if (m_workerThread)
1601 : {
1602 89 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1603 : }
1604 : }
1605 :
1606 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1607 : {
1608 1 : if (m_workerThread)
1609 : {
1610 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1611 : }
1612 : }
1613 :
1614 2 : void GstGenericPlayer::scheduleAudioUnderflow()
1615 : {
1616 2 : if (m_workerThread)
1617 : {
1618 2 : bool underflowEnabled = m_context.isPlaying;
1619 4 : m_workerThread->enqueueTask(
1620 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1621 : }
1622 2 : }
1623 :
1624 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1625 : {
1626 2 : if (m_workerThread)
1627 : {
1628 2 : bool underflowEnabled = m_context.isPlaying;
1629 4 : m_workerThread->enqueueTask(
1630 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1631 : }
1632 2 : }
1633 :
void GstGenericPlayer::scheduleAllSourcesAttached()
{
    // NOTE(review): despite the "schedule" name, this calls allSourcesAttached()
    // synchronously rather than enqueueing a worker-thread task.
    allSourcesAttached();
}
1638 :
1639 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1640 : {
1641 14 : auto elem = m_context.streamInfo.find(mediaSource);
1642 14 : if (elem != m_context.streamInfo.end())
1643 : {
1644 14 : StreamInfo &streamInfo = elem->second;
1645 14 : if (!streamInfo.underflowOccured)
1646 : {
1647 11 : return;
1648 : }
1649 :
1650 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1651 3 : streamInfo.underflowOccured = false;
1652 : }
1653 : }
1654 :
// Requests a transition to GST_STATE_PLAYING. 'async' is set to true when the
// state change completes asynchronously, or when the request had to be queued
// behind other in-flight state changes.
void GstGenericPlayer::play(bool &async)
{
    if (0 == m_ongoingStateChangesNumber)
    {
        // Operation called on main thread, because PAUSED->PLAYING change is synchronous and needs to be done fast.
        //
        // m_context.pipeline can be used, because it's modified only in GstGenericPlayer
        // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
        ++m_ongoingStateChangesNumber;
        async = (changePipelineState(GST_STATE_PLAYING) == GST_STATE_CHANGE_ASYNC);
        RIALTO_SERVER_LOG_MIL("State change to PLAYING requested");
    }
    else
    {
        // Another state change is already in flight: defer the PLAY to the
        // worker thread so requests are applied in order.
        ++m_ongoingStateChangesNumber;
        async = true;
        if (m_workerThread)
        {
            m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
        }
    }
}
1677 :
1678 2 : void GstGenericPlayer::pause()
1679 : {
1680 2 : ++m_ongoingStateChangesNumber;
1681 2 : if (m_workerThread)
1682 : {
1683 2 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1684 : }
1685 : }
1686 :
1687 1 : void GstGenericPlayer::stop()
1688 : {
1689 1 : ++m_ongoingStateChangesNumber;
1690 1 : if (m_workerThread)
1691 : {
1692 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1693 : }
1694 : }
1695 :
// Applies a pipeline state change, reporting FAILURE to the client when it
// cannot be performed. Always decrements m_ongoingStateChangesNumber, which
// the caller (play()/pause()/stop()) incremented when requesting the change.
GstStateChangeReturn GstGenericPlayer::changePipelineState(GstState newState)
{
    if (!m_context.pipeline)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
        --m_ongoingStateChangesNumber;
        return GST_STATE_CHANGE_FAILURE;
    }
    // Inform the flush-on-preroll controller of the target state before switching.
    m_context.flushOnPrerollController->setTargetState(newState);
    const GstStateChangeReturn result{m_gstWrapper->gstElementSetState(m_context.pipeline, newState)};
    if (result == GST_STATE_CHANGE_FAILURE)
    {
        RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
        if (m_gstPlayerClient)
            m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
    }
    --m_ongoingStateChangesNumber;
    return result;
}
1717 :
// Queries the current playback position from the pipeline (GST_FORMAT_TIME,
// i.e. nanoseconds). Returns -1 when the element is null, still prerolling,
// or the position query fails.
int64_t GstGenericPlayer::getPosition(GstElement *element)
{
    if (!element)
    {
        RIALTO_SERVER_LOG_WARN("Element is null");
        return -1;
    }

    // Hold the element's state lock so the three state reads below are consistent.
    m_gstWrapper->gstStateLock(element);

    // Position is not meaningful below PAUSED, or while an async transition
    // into PAUSED (preroll) is still in progress.
    if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
        (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
         m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
    {
        RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
                               m_gstWrapper->gstElementStateChangeReturnGetName(
                                   m_gstWrapper->gstElementGetStateReturn(element)),
                               m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));

        m_gstWrapper->gstStateUnlock(element);
        return -1;
    }
    m_gstWrapper->gstStateUnlock(element);

    gint64 position = -1;
    // NOTE: the query goes to the pipeline; 'element' is used only for the state checks above.
    if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
    {
        RIALTO_SERVER_LOG_WARN("Failed to query position");
        return -1;
    }

    return position;
}
1752 :
1753 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1754 : {
1755 1 : if (m_workerThread)
1756 : {
1757 2 : m_workerThread->enqueueTask(
1758 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1759 : }
1760 1 : }
1761 :
1762 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1763 : {
1764 1 : if (m_workerThread)
1765 : {
1766 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1767 : }
1768 : }
1769 :
1770 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1771 : {
1772 4 : bool result = false;
1773 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1774 4 : if (videoSink)
1775 : {
1776 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1777 : {
1778 : std::string rect =
1779 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1780 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1781 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1782 2 : m_context.pendingGeometry.clear();
1783 2 : result = true;
1784 : }
1785 : else
1786 : {
1787 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1788 : }
1789 3 : m_gstWrapper->gstObjectUnref(videoSink);
1790 : }
1791 : else
1792 : {
1793 1 : RIALTO_SERVER_LOG_ERROR("Failed to set video rectangle, sink is NULL");
1794 : }
1795 :
1796 4 : return result;
1797 : }
1798 :
1799 3 : bool GstGenericPlayer::setImmediateOutput()
1800 : {
1801 3 : bool result{false};
1802 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1803 : {
1804 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1805 3 : if (sink)
1806 : {
1807 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1808 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1809 :
1810 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1811 : {
1812 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1813 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1814 1 : result = true;
1815 : }
1816 : else
1817 : {
1818 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1819 : }
1820 2 : m_context.pendingImmediateOutputForVideo.reset();
1821 2 : m_gstWrapper->gstObjectUnref(sink);
1822 : }
1823 : else
1824 : {
1825 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1826 : }
1827 : }
1828 3 : return result;
1829 : }
1830 :
1831 4 : bool GstGenericPlayer::setShowVideoWindow()
1832 : {
1833 4 : if (!m_context.pendingShowVideoWindow.has_value())
1834 : {
1835 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1836 1 : return false;
1837 : }
1838 :
1839 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1840 3 : if (!videoSink)
1841 : {
1842 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1843 1 : return false;
1844 : }
1845 2 : bool result{false};
1846 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1847 : {
1848 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1849 1 : result = true;
1850 : }
1851 : else
1852 : {
1853 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1854 : }
1855 2 : m_context.pendingShowVideoWindow.reset();
1856 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1857 2 : return result;
1858 : }
1859 :
1860 4 : bool GstGenericPlayer::setLowLatency()
1861 : {
1862 4 : bool result{false};
1863 4 : if (m_context.pendingLowLatency.has_value())
1864 : {
1865 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1866 4 : if (sink)
1867 : {
1868 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1869 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1870 :
1871 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1872 : {
1873 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1874 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1875 2 : result = true;
1876 : }
1877 : else
1878 : {
1879 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1880 : }
1881 3 : m_context.pendingLowLatency.reset();
1882 3 : m_gstWrapper->gstObjectUnref(sink);
1883 : }
1884 : else
1885 : {
1886 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1887 : }
1888 : }
1889 4 : return result;
1890 : }
1891 :
1892 3 : bool GstGenericPlayer::setSync()
1893 : {
1894 3 : bool result{false};
1895 3 : if (m_context.pendingSync.has_value())
1896 : {
1897 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1898 3 : if (sink)
1899 : {
1900 2 : bool sync{m_context.pendingSync.value()};
1901 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1902 :
1903 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1904 : {
1905 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1906 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1907 1 : result = true;
1908 : }
1909 : else
1910 : {
1911 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1912 : }
1913 2 : m_context.pendingSync.reset();
1914 2 : m_gstWrapper->gstObjectUnref(sink);
1915 : }
1916 : else
1917 : {
1918 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1919 : }
1920 : }
1921 3 : return result;
1922 : }
1923 :
1924 3 : bool GstGenericPlayer::setSyncOff()
1925 : {
1926 3 : bool result{false};
1927 3 : if (m_context.pendingSyncOff.has_value())
1928 : {
1929 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1930 3 : if (decoder)
1931 : {
1932 2 : bool syncOff{m_context.pendingSyncOff.value()};
1933 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1934 :
1935 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1936 : {
1937 1 : gboolean syncOffGboolean{decoder ? TRUE : FALSE};
1938 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1939 1 : result = true;
1940 : }
1941 : else
1942 : {
1943 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1944 : }
1945 2 : m_context.pendingSyncOff.reset();
1946 2 : m_gstWrapper->gstObjectUnref(decoder);
1947 : }
1948 : else
1949 : {
1950 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1951 : }
1952 : }
1953 3 : return result;
1954 : }
1955 :
// Applies the pending stream-sync-mode for the given source type:
// - AUDIO: "stream-sync-mode" (gint) on the audio decoder
// - VIDEO: "syncmode-streaming" (gboolean) on the video parser
// The pending value is read and later erased under m_context.propertyMutex;
// the mutex is NOT held across the GStreamer calls in between.
bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
{
    bool result{false};
    int32_t streamSyncMode{0};
    {
        std::unique_lock lock{m_context.propertyMutex};
        if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
        {
            return false;
        }
        streamSyncMode = m_context.pendingStreamSyncMode[type];
    }
    if (MediaSourceType::AUDIO == type)
    {
        GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
        if (!decoder)
        {
            // Pending entry is kept so the value can be applied once a decoder exists.
            RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
            return false;
        }

        RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
        {
            gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
            m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Re-acquire the lock to drop the pending entry (held until end of branch).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    else if (MediaSourceType::VIDEO == type)
    {
        GstElement *parser = getParser(MediaSourceType::VIDEO);
        if (!parser)
        {
            // Pending entry is kept so the value can be applied once a parser exists.
            RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
            return false;
        }

        gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
        RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
        {
            m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
        }
        m_gstWrapper->gstObjectUnref(parser);
        // Re-acquire the lock to drop the pending entry (held until end of branch).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingStreamSyncMode.erase(type);
    }
    return result;
}
2020 :
2021 3 : bool GstGenericPlayer::setRenderFrame()
2022 : {
2023 3 : bool result{false};
2024 3 : if (m_context.pendingRenderFrame)
2025 : {
2026 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
2027 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
2028 3 : if (sink)
2029 : {
2030 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
2031 : {
2032 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
2033 :
2034 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
2035 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
2036 : false));
2037 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
2038 1 : result = true;
2039 : }
2040 : else
2041 : {
2042 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
2043 : }
2044 2 : m_gstWrapper->gstObjectUnref(sink);
2045 2 : m_context.pendingRenderFrame = false;
2046 : }
2047 : else
2048 : {
2049 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
2050 : }
2051 : }
2052 3 : return result;
2053 : }
2054 :
// Applies the pending "limit-buffering-ms" value to the audio decoder.
// The pending value is read and later cleared under m_context.propertyMutex;
// the mutex is NOT held across the GStreamer calls in between.
bool GstGenericPlayer::setBufferingLimit()
{
    bool result{false};
    guint bufferingLimit{0};
    {
        std::unique_lock lock{m_context.propertyMutex};
        if (!m_context.pendingBufferingLimit.has_value())
        {
            return false;
        }
        bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
    }

    GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
    if (decoder)
    {
        RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);

        if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
        {
            m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
            result = true;
        }
        else
        {
            RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
                                    GST_ELEMENT_NAME(decoder));
        }
        m_gstWrapper->gstObjectUnref(decoder);
        // Re-acquire the lock to clear the pending value (held until end of branch).
        std::unique_lock lock{m_context.propertyMutex};
        m_context.pendingBufferingLimit.reset();
    }
    else
    {
        // Keep the pending value so it can be applied once a decoder exists.
        RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
    }
    return result;
}
2093 :
2094 2 : bool GstGenericPlayer::setUseBuffering()
2095 : {
2096 2 : std::unique_lock lock{m_context.propertyMutex};
2097 2 : if (m_context.pendingUseBuffering.has_value())
2098 : {
2099 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2100 : {
2101 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
2102 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
2103 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
2104 : useBufferingGboolean, nullptr);
2105 1 : m_context.pendingUseBuffering.reset();
2106 1 : return true;
2107 : }
2108 : else
2109 : {
2110 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
2111 : }
2112 : }
2113 1 : return false;
2114 2 : }
2115 :
2116 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
2117 : {
2118 8 : bool result = false;
2119 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
2120 8 : if (factory)
2121 : {
2122 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
2123 7 : if (videoSink)
2124 : {
2125 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
2126 : {
2127 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
2128 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
2129 4 : result = true;
2130 : }
2131 : else
2132 : {
2133 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
2134 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
2135 : }
2136 : }
2137 : else
2138 : {
2139 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
2140 : }
2141 :
2142 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2143 : }
2144 : else
2145 : {
2146 : // No westeros sink
2147 1 : result = true;
2148 : }
2149 :
2150 8 : return result;
2151 : }
2152 :
2153 8 : bool GstGenericPlayer::setErmContext()
2154 : {
2155 8 : bool result = false;
2156 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
2157 8 : if (context)
2158 : {
2159 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
2160 6 : if (contextStructure)
2161 : {
2162 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
2163 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
2164 5 : result = true;
2165 : }
2166 : else
2167 : {
2168 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
2169 : }
2170 6 : m_gstWrapper->gstContextUnref(context);
2171 : }
2172 : else
2173 : {
2174 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
2175 : }
2176 :
2177 8 : return result;
2178 : }
2179 :
2180 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
2181 : {
2182 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
2183 : {
2184 1 : return;
2185 : }
2186 :
2187 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
2188 : kPositionReportTimerMs,
2189 10 : [this]()
2190 : {
2191 1 : if (m_workerThread)
2192 : {
2193 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
2194 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
2195 : }
2196 1 : },
2197 5 : firebolt::rialto::common::TimerType::PERIODIC);
2198 : }
2199 :
2200 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
2201 : {
2202 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
2203 : {
2204 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
2205 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
2206 : }
2207 4 : }
2208 :
2209 6 : void GstGenericPlayer::startNotifyPlaybackInfoTimer()
2210 : {
2211 : static constexpr std::chrono::milliseconds kPlaybackInfoTimerMs{32};
2212 6 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
2213 : {
2214 1 : return;
2215 : }
2216 :
2217 5 : notifyPlaybackInfo();
2218 :
2219 : m_playbackInfoTimer =
2220 5 : m_timerFactory
2221 6 : ->createTimer(kPlaybackInfoTimerMs, [this]() { notifyPlaybackInfo(); }, firebolt::rialto::common::TimerType::PERIODIC);
2222 : }
2223 :
2224 3 : void GstGenericPlayer::stopNotifyPlaybackInfoTimer()
2225 : {
2226 3 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
2227 : {
2228 1 : m_playbackInfoTimer->cancel();
2229 1 : m_playbackInfoTimer.reset();
2230 : }
2231 3 : }
2232 :
2233 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
2234 : {
2235 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
2236 : {
2237 0 : return;
2238 : }
2239 :
2240 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
2241 : kSubtitleClockResyncInterval,
2242 0 : [this]()
2243 : {
2244 0 : if (m_workerThread)
2245 : {
2246 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
2247 : }
2248 0 : },
2249 0 : firebolt::rialto::common::TimerType::PERIODIC);
2250 : }
2251 :
2252 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
2253 : {
2254 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
2255 : {
2256 0 : m_subtitleClockResyncTimer->cancel();
2257 0 : m_subtitleClockResyncTimer.reset();
2258 : }
2259 : }
2260 :
2261 2 : void GstGenericPlayer::stopWorkerThread()
2262 : {
2263 2 : if (m_workerThread)
2264 : {
2265 2 : m_workerThread->stop();
2266 : }
2267 : }
2268 :
void GstGenericPlayer::setPendingPlaybackRate()
{
    // Applies the playback rate stored in m_context.pendingPlaybackRate.
    // NOTE(review): presumably set earlier when the rate could not be applied
    // immediately - confirm with the code that writes pendingPlaybackRate.
    RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
    setPlaybackRate(m_context.pendingPlaybackRate);
}
2274 :
2275 1 : void GstGenericPlayer::renderFrame()
2276 : {
2277 1 : if (m_workerThread)
2278 : {
2279 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
2280 : }
2281 : }
2282 :
2283 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
2284 : {
2285 18 : if (m_workerThread)
2286 : {
2287 36 : m_workerThread->enqueueTask(
2288 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
2289 : }
2290 18 : }
2291 :
// Reads the current audio volume: from the sink's "fade-volume" property when
// audio fade is active and the property exists, otherwise from the pipeline's
// stream volume. Returns false only when the pipeline does not exist.
bool GstGenericPlayer::getVolume(double &currentVolume)
{
    // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
    // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
    if (!m_context.pipeline)
    {
        return false;
    }

    // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
    // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
    // The code has been written to be backwardly compatible on platforms that don't have this property.
    // The observed behaviour was:
    //   - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
    //     to find volume in the pipeline works and is used.
    //   - if the returned fade volume is positive then audio-fade is active. In this case the returned fade volume
    //     directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
    //     meaningless and doesn't contribute in this case).
    GstElement *sink{getSink(MediaSourceType::AUDIO)};
    if (m_context.audioFadeEnabled && sink &&
        m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
    {
        gint fadeVolume{-100};
        m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
        if (fadeVolume < 0)
        {
            currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                                   GST_STREAM_VOLUME_FORMAT_LINEAR);
            RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
        }
        else
        {
            // Scale 0..100 fade volume into the 0.0..1.0 range callers expect.
            currentVolume = static_cast<double>(fadeVolume) / 100.0;
            RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
        }
    }
    else
    {
        currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
                                                               GST_STREAM_VOLUME_FORMAT_LINEAR);
        RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
    }

    if (sink)
        m_gstWrapper->gstObjectUnref(sink);

    return true;
}
2340 :
2341 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
2342 : {
2343 1 : if (m_workerThread)
2344 : {
2345 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
2346 : }
2347 : }
2348 :
2349 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
2350 : {
2351 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
2352 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
2353 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
2354 : {
2355 2 : if (!m_context.subtitleSink)
2356 : {
2357 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2358 1 : return false;
2359 : }
2360 1 : gboolean muteValue{FALSE};
2361 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
2362 1 : mute = muteValue;
2363 : }
2364 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
2365 : {
2366 2 : if (!m_context.pipeline)
2367 : {
2368 1 : return false;
2369 : }
2370 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
2371 : }
2372 : else
2373 : {
2374 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
2375 1 : return false;
2376 : }
2377 :
2378 2 : return true;
2379 : }
2380 :
2381 2 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
2382 : {
2383 2 : GstElement *sink = getSink(mediaSourceType);
2384 2 : if (!sink)
2385 : {
2386 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
2387 0 : return true; // Our sinks are async by default
2388 : }
2389 2 : gboolean returnValue{TRUE};
2390 2 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
2391 2 : m_gstWrapper->gstObjectUnref(sink);
2392 2 : return returnValue == TRUE;
2393 : }
2394 :
2395 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
2396 : {
2397 1 : if (m_workerThread)
2398 : {
2399 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
2400 : }
2401 : }
2402 :
2403 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
2404 : {
2405 3 : if (!m_context.subtitleSink)
2406 : {
2407 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
2408 1 : return false;
2409 : }
2410 :
2411 2 : gchar *identifier = nullptr;
2412 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
2413 :
2414 2 : if (identifier)
2415 : {
2416 1 : textTrackIdentifier = identifier;
2417 1 : m_glibWrapper->gFree(identifier);
2418 1 : return true;
2419 : }
2420 : else
2421 : {
2422 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
2423 1 : return false;
2424 : }
2425 : }
2426 :
2427 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
2428 : {
2429 1 : if (m_workerThread)
2430 : {
2431 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
2432 : }
2433 1 : return true;
2434 : }
2435 :
2436 1 : bool GstGenericPlayer::setSync(bool sync)
2437 : {
2438 1 : if (m_workerThread)
2439 : {
2440 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
2441 : }
2442 1 : return true;
2443 : }
2444 :
2445 4 : bool GstGenericPlayer::getSync(bool &sync)
2446 : {
2447 4 : bool returnValue{false};
2448 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
2449 4 : if (sink)
2450 : {
2451 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
2452 : {
2453 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
2454 1 : returnValue = true;
2455 : }
2456 : else
2457 : {
2458 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
2459 : }
2460 2 : m_gstWrapper->gstObjectUnref(sink);
2461 : }
2462 2 : else if (m_context.pendingSync.has_value())
2463 : {
2464 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2465 1 : sync = m_context.pendingSync.value();
2466 1 : returnValue = true;
2467 : }
2468 : else
2469 : {
2470 : // We dont know the default setting on the sync, so return failure here
2471 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
2472 : }
2473 :
2474 4 : return returnValue;
2475 : }
2476 :
2477 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
2478 : {
2479 1 : if (m_workerThread)
2480 : {
2481 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
2482 : }
2483 1 : return true;
2484 : }
2485 :
2486 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
2487 : {
2488 1 : if (m_workerThread)
2489 : {
2490 2 : m_workerThread->enqueueTask(
2491 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
2492 : }
2493 1 : return true;
2494 : }
2495 :
2496 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
2497 : {
2498 5 : bool returnValue{false};
2499 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2500 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
2501 : {
2502 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
2503 2 : returnValue = true;
2504 : }
2505 : else
2506 : {
2507 3 : std::unique_lock lock{m_context.propertyMutex};
2508 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
2509 : {
2510 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2511 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
2512 1 : returnValue = true;
2513 : }
2514 : else
2515 : {
2516 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
2517 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2518 : }
2519 3 : }
2520 :
2521 5 : if (decoder)
2522 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2523 :
2524 5 : return returnValue;
2525 : }
2526 :
2527 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2528 : {
2529 1 : if (m_workerThread)
2530 : {
2531 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2532 : }
2533 : }
2534 :
2535 2 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2536 : {
2537 2 : if (m_workerThread)
2538 : {
2539 2 : async = isAsync(mediaSourceType);
2540 2 : m_flushWatcher->setFlushing(mediaSourceType, async);
2541 2 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime, async));
2542 : }
2543 : }
2544 :
2545 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2546 : double appliedRate, uint64_t stopPosition)
2547 : {
2548 1 : if (m_workerThread)
2549 : {
2550 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2551 : resetTime, appliedRate, stopPosition));
2552 : }
2553 : }
2554 :
2555 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2556 : {
2557 0 : if (m_workerThread)
2558 : {
2559 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2560 : }
2561 : }
2562 :
2563 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2564 : {
2565 1 : if (m_workerThread)
2566 : {
2567 2 : m_workerThread->enqueueTask(
2568 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2569 : }
2570 1 : }
2571 :
2572 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2573 : {
2574 1 : if (m_workerThread)
2575 : {
2576 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2577 : }
2578 : }
2579 :
2580 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2581 : {
2582 5 : bool returnValue{false};
2583 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2584 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2585 : {
2586 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2587 2 : returnValue = true;
2588 : }
2589 : else
2590 : {
2591 3 : std::unique_lock lock{m_context.propertyMutex};
2592 3 : if (m_context.pendingBufferingLimit.has_value())
2593 : {
2594 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2595 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2596 1 : returnValue = true;
2597 : }
2598 : else
2599 : {
2600 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2601 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2602 : }
2603 3 : }
2604 :
2605 5 : if (decoder)
2606 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2607 :
2608 5 : return returnValue;
2609 : }
2610 :
2611 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2612 : {
2613 1 : if (m_workerThread)
2614 : {
2615 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2616 : }
2617 : }
2618 :
2619 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2620 : {
2621 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2622 : {
2623 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2624 1 : return true;
2625 : }
2626 : else
2627 : {
2628 2 : std::unique_lock lock{m_context.propertyMutex};
2629 2 : if (m_context.pendingUseBuffering.has_value())
2630 : {
2631 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2632 1 : useBuffering = m_context.pendingUseBuffering.value();
2633 1 : return true;
2634 : }
2635 2 : }
2636 1 : return false;
2637 : }
2638 :
2639 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2640 : {
2641 1 : if (m_workerThread)
2642 : {
2643 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2644 : }
2645 : }
2646 :
2647 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2648 : {
2649 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2650 : }
2651 :
2652 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2653 : {
2654 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2655 : }
2656 :
2657 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2658 : {
2659 : // Only add children that are sinks
2660 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2661 : {
2662 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2663 :
2664 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2665 : {
2666 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is been overwritten");
2667 : }
2668 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2669 : }
2670 3 : }
2671 :
2672 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2673 : {
2674 : // Only add children that are sinks
2675 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2676 : {
2677 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2678 :
2679 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2680 : {
2681 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is been overwritten");
2682 : }
2683 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2684 : }
2685 3 : }
2686 :
2687 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2688 : {
2689 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2690 : {
2691 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2692 :
2693 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2694 : {
2695 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2696 1 : return;
2697 : }
2698 :
2699 2 : m_context.autoVideoChildSink = nullptr;
2700 : }
2701 : }
2702 :
2703 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2704 : {
2705 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2706 : {
2707 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2708 :
2709 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2710 : {
2711 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2712 1 : return;
2713 : }
2714 :
2715 2 : m_context.autoAudioChildSink = nullptr;
2716 : }
2717 : }
2718 :
2719 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2720 : {
2721 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2722 14 : if (!kTmpName)
2723 0 : return sink;
2724 :
2725 28 : const std::string kElementTypeName{kTmpName};
2726 14 : if (kElementTypeName == "GstAutoVideoSink")
2727 : {
2728 1 : if (!m_context.autoVideoChildSink)
2729 : {
2730 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2731 : }
2732 : else
2733 : {
2734 1 : return m_context.autoVideoChildSink;
2735 : }
2736 : }
2737 13 : return sink;
2738 14 : }
2739 :
2740 16 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2741 : {
2742 16 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2743 16 : if (!kTmpName)
2744 0 : return sink;
2745 :
2746 32 : const std::string kElementTypeName{kTmpName};
2747 16 : if (kElementTypeName == "GstAutoAudioSink")
2748 : {
2749 1 : if (!m_context.autoAudioChildSink)
2750 : {
2751 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2752 : }
2753 : else
2754 : {
2755 1 : return m_context.autoAudioChildSink;
2756 : }
2757 : }
2758 15 : return sink;
2759 16 : }
2760 :
2761 215 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2762 : {
2763 215 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2764 :
2765 215 : if (enableAudio)
2766 : {
2767 215 : flags |= getGstPlayFlag("audio");
2768 215 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2769 : }
2770 :
2771 215 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2772 : }
2773 :
2774 215 : bool GstGenericPlayer::shouldEnableNativeAudio()
2775 : {
2776 215 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2777 215 : if (factory)
2778 : {
2779 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2780 1 : return true;
2781 : }
2782 214 : return false;
2783 : }
2784 :
2785 : }; // namespace firebolt::rialto::server
|