Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Report position interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
46 :
47 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
48 : {
49 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
50 2 : (lhs.stopPosition == rhs.stopPosition);
51 : }
52 : } // namespace
53 :
54 : namespace firebolt::rialto::server
55 : {
56 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
57 :
58 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
59 : {
60 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
61 :
62 3 : if (!factory)
63 : {
64 : try
65 : {
66 3 : factory = std::make_shared<GstGenericPlayerFactory>();
67 : }
68 0 : catch (const std::exception &e)
69 : {
70 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
71 : }
72 :
73 3 : GstGenericPlayerFactory::m_factory = factory;
74 : }
75 :
76 3 : return factory;
77 : }
78 :
79 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
80 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
81 : const VideoRequirements &videoRequirements,
82 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
83 : {
84 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
85 :
86 : try
87 : {
88 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
89 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
92 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
93 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
94 : {
95 0 : throw std::runtime_error("Cannot create GstWrapper");
96 : }
97 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
98 : {
99 0 : throw std::runtime_error("Cannot create GlibWrapper");
100 : }
101 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
102 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
103 : {
104 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
105 : }
106 : gstPlayer = std::make_unique<
107 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
108 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
109 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
110 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
111 : rdkGstreamerUtilsWrapper,
112 2 : IGstTextTrackSinkFactory::createFactory()),
113 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
114 3 : IGstProtectionMetadataHelperFactory::createFactory());
115 1 : }
116 0 : catch (const std::exception &e)
117 : {
118 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
119 : }
120 :
121 1 : return gstPlayer;
122 : }
123 :
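// Editor's sketch (not part of the source file): typical client-side usage of the factory above.
// The client, decryptionService, videoRequirements and rdkGstreamerUtilsWrapperFactory variables
// are assumed to exist in the caller; only the calls shown here come from this file.
auto factory = firebolt::rialto::server::IGstGenericPlayerFactory::getFactory();
std::unique_ptr<firebolt::rialto::server::IGstGenericPlayer> player =
    factory->createGstGenericPlayer(client, decryptionService, firebolt::rialto::MediaType::MSE,
                                    videoRequirements, rdkGstreamerUtilsWrapperFactory);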
124 219 : GstGenericPlayer::GstGenericPlayer(
125 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
126 : const VideoRequirements &videoRequirements,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
129 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
130 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
131 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
132 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
133 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
134 219 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
135 219 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
136 438 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
137 657 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
138 : {
139 219 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
140 :
141 219 : gstInitialiser.waitForInitialisation();
142 :
143 219 : m_context.decryptionService = &decryptionService;
144 :
145 219 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
146 : {
147 2 : throw std::runtime_error("Cannot create GstSrc");
148 : }
149 :
150 217 : if (!timerFactory)
151 : {
152 1 : throw std::runtime_error("TimerFactory is invalid");
153 : }
154 :
155 432 : if ((!gstProtectionMetadataFactory) ||
156 432 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
157 : {
158 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
159 : }
160 :
161 : // Ensure that rialtosrc has been initialised
162 216 : m_context.gstSrc->initSrc();
163 :
164 : // Start task thread
165 216 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
166 : {
167 0 : throw std::runtime_error("Failed to create the worker thread");
168 : }
169 :
170 : // Initialise pipeline
171 216 : switch (type)
172 : {
173 215 : case MediaType::MSE:
174 : {
175 215 : initMsePipeline();
176 215 : break;
177 : }
178 1 : default:
179 : {
180 1 : resetWorkerThread();
181 1 : throw std::runtime_error("Media type not supported");
182 : }
183 : }
184 :
185 : // Check the video requirements for a limited video.
186 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
187 : // video in a dual video scenario.
188 215 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
189 : {
190 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
191 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
192 8 : bool ermContextResult = setErmContext();
193 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
194 : {
195 1 : resetWorkerThread();
196 1 : termPipeline();
197 1 : throw std::runtime_error("Could not set secondary video");
198 : }
199 7 : }
200 : else
201 : {
202 207 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
203 : }
204 :
205 : m_gstDispatcherThread =
206 214 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
207 304 : }
208 :
209 428 : GstGenericPlayer::~GstGenericPlayer()
210 : {
211 214 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
212 214 : m_gstDispatcherThread.reset();
213 :
214 214 : resetWorkerThread();
215 :
216 214 : termPipeline();
217 428 : }
218 :
219 215 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 215 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 215 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 215 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 215 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 215 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 215 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 215 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 215 : if (playsink)
238 : {
239 214 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 214 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 215 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
247 : {
248 1 : GST_WARNING("Failed to set pipeline to READY state");
249 : }
250 215 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
251 : }
252 :
253 216 : void GstGenericPlayer::resetWorkerThread()
254 : {
255 216 : m_postponedFlushes.clear();
256 : // Shutdown task thread
257 216 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
258 216 : m_workerThread->join();
259 216 : m_workerThread.reset();
260 : }
261 :
262 215 : void GstGenericPlayer::termPipeline()
263 : {
264 215 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
265 : {
266 0 : m_finishSourceSetupTimer->cancel();
267 : }
268 :
269 215 : m_finishSourceSetupTimer.reset();
270 :
271 264 : for (auto &elem : m_context.streamInfo)
272 : {
273 49 : StreamInfo &streamInfo = elem.second;
274 51 : for (auto &buffer : streamInfo.buffers)
275 : {
276 2 : m_gstWrapper->gstBufferUnref(buffer);
277 : }
278 :
279 49 : streamInfo.buffers.clear();
280 : }
281 :
282 215 : m_taskFactory->createStop(m_context, *this)->execute();
283 215 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
284 215 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
285 215 : m_gstWrapper->gstObjectUnref(bus);
286 :
287 215 : if (m_context.source)
288 : {
289 1 : m_gstWrapper->gstObjectUnref(m_context.source);
290 : }
291 215 : if (m_context.subtitleSink)
292 : {
293 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
294 4 : m_context.subtitleSink = nullptr;
295 : }
296 :
297 215 : if (m_context.videoSink)
298 : {
299 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
300 0 : m_context.videoSink = nullptr;
301 : }
302 :
303 : // Delete the pipeline
304 215 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
305 :
306 215 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
307 : }
308 :
309 861 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
310 : {
311 : GFlagsClass *flagsClass =
312 861 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
313 861 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
314 861 : return flag ? flag->value : 0;
315 : }
316 :
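// Editor's sketch (not part of the source file): how a value returned by getGstPlayFlag() is
// typically combined and applied to playbin's "flags" property. The exact nicks used by
// setPlaybinFlags() are not visible in this listing; "audio" and "video" are illustrative.
unsigned flags = getGstPlayFlag("audio") | getGstPlayFlag("video");
m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);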
317 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
318 : {
319 1 : self->m_gstWrapper->gstObjectRef(source);
320 1 : if (self->m_workerThread)
321 : {
322 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
323 : }
324 : }
325 :
326 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
327 : {
328 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
329 1 : self->m_gstWrapper->gstObjectRef(element);
330 1 : if (self->m_workerThread)
331 : {
332 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
333 : }
334 : }
335 :
336 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
337 : {
338 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
339 1 : if (self->m_workerThread)
340 : {
341 2 : self->m_workerThread->enqueueTask(
342 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
343 : }
344 1 : }
345 :
346 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
347 : {
348 1 : if (m_workerThread)
349 : {
350 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
351 : }
352 : }
353 :
354 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
355 : {
356 1 : if (m_workerThread)
357 : {
358 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
359 : }
360 : }
361 :
362 2 : void GstGenericPlayer::allSourcesAttached()
363 : {
364 2 : if (m_workerThread)
365 : {
366 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
367 : }
368 : }
369 :
370 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
371 : {
372 1 : if (m_workerThread)
373 : {
374 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
375 : }
376 : }
377 :
378 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
379 : {
380 1 : if (m_workerThread)
381 : {
382 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
383 : }
384 : }
385 :
386 1 : void GstGenericPlayer::setPosition(std::int64_t position)
387 : {
388 1 : if (m_workerThread)
389 : {
390 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
391 : }
392 : }
393 :
394 1 : void GstGenericPlayer::setPlaybackRate(double rate)
395 : {
396 1 : if (m_workerThread)
397 : {
398 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
399 : }
400 : }
401 :
402 11 : bool GstGenericPlayer::getPosition(std::int64_t &position)
403 : {
404 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
405 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
406 11 : position = getPosition(m_context.pipeline);
407 11 : if (position == -1)
408 : {
409 3 : return false;
410 : }
411 :
412 8 : return true;
413 : }
414 :
415 44 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
416 : {
417 44 : const char *kSinkName{nullptr};
418 44 : GstElement *sink{nullptr};
419 44 : switch (mediaSourceType)
420 : {
421 24 : case MediaSourceType::AUDIO:
422 24 : kSinkName = "audio-sink";
423 24 : break;
424 18 : case MediaSourceType::VIDEO:
425 18 : kSinkName = "video-sink";
426 18 : break;
427 2 : default:
428 2 : break;
429 : }
430 44 : if (!kSinkName)
431 : {
432 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
433 : }
434 : else
435 : {
436 42 : if (m_context.pipeline == nullptr)
437 : {
438 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
439 : }
440 : else
441 : {
442 42 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
443 : }
444 42 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
445 42 : if (sink)
446 : {
447 25 : GstElement *autoSink{sink};
448 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
449 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
450 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
451 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
452 :
453 : // Is this an auto-sink?...
454 25 : if (autoSink != sink)
455 : {
456 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
457 :
458 : // increase the reference count of the auto sink
459 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
460 : }
461 : }
462 : }
463 44 : return sink;
464 : }
465 :
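// Editor's note (sketch, not part of the source file): getSink() returns a referenced element,
// so each caller is expected to unref it once done, as the call sites later in this file do.
GstElement *sink = getSink(MediaSourceType::AUDIO);
if (sink)
{
    // ... query or set sink properties ...
    m_gstWrapper->gstObjectUnref(sink);
}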
466 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
467 : {
468 1 : m_flushWatcher->setFlushed(mediaSourceType);
469 : }
470 :
471 1 : void GstGenericPlayer::postponeFlush(const MediaSourceType &mediaSourceType, bool resetTime)
472 : {
473 1 : m_postponedFlushes.emplace_back(std::make_pair(mediaSourceType, resetTime));
474 : }
475 :
476 1 : void GstGenericPlayer::executePostponedFlushes()
477 : {
478 1 : if (m_workerThread)
479 : {
480 2 : for (const auto &[mediaSourceType, resetTime] : m_postponedFlushes)
481 : {
482 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
483 : }
484 : }
485 1 : m_postponedFlushes.clear();
486 : }
487 :
488 6 : void GstGenericPlayer::notifyPlaybackInfo()
489 : {
490 6 : PlaybackInfo info;
491 6 : getPosition(info.currentPosition);
492 6 : getVolume(info.volume);
493 6 : m_gstPlayerClient->notifyPlaybackInfo(info);
494 : }
495 :
496 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
497 : {
498 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
499 19 : GValue item = G_VALUE_INIT;
500 19 : gboolean done = FALSE;
501 :
502 28 : while (!done)
503 : {
504 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
505 : {
506 12 : case GST_ITERATOR_OK:
507 : {
508 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
509 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
510 :
511 12 : if (factory)
512 : {
513 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
514 12 : if (mediaSourceType == MediaSourceType::AUDIO)
515 : {
516 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
517 : }
518 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
519 : {
520 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
521 : }
522 :
523 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
524 : {
525 12 : m_glibWrapper->gValueUnset(&item);
526 12 : m_gstWrapper->gstIteratorFree(it);
527 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
528 : }
529 : }
530 :
531 0 : m_glibWrapper->gValueUnset(&item);
532 0 : break;
533 : }
534 2 : case GST_ITERATOR_RESYNC:
535 2 : m_gstWrapper->gstIteratorResync(it);
536 2 : break;
537 7 : case GST_ITERATOR_ERROR:
538 : case GST_ITERATOR_DONE:
539 7 : done = TRUE;
540 7 : break;
541 : }
542 : }
543 :
544 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
545 :
546 7 : m_glibWrapper->gValueUnset(&item);
547 7 : m_gstWrapper->gstIteratorFree(it);
548 :
549 7 : return nullptr;
550 : }
551 :
552 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
553 : {
554 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
555 3 : GValue item = G_VALUE_INIT;
556 3 : gboolean done = FALSE;
557 :
558 4 : while (!done)
559 : {
560 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
561 : {
562 2 : case GST_ITERATOR_OK:
563 : {
564 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
565 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
566 :
567 2 : if (factory)
568 : {
569 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
570 2 : if (mediaSourceType == MediaSourceType::AUDIO)
571 : {
572 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
573 : }
574 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
575 : {
576 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
577 : }
578 :
579 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
580 : {
581 2 : m_glibWrapper->gValueUnset(&item);
582 2 : m_gstWrapper->gstIteratorFree(it);
583 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
584 : }
585 : }
586 :
587 0 : m_glibWrapper->gValueUnset(&item);
588 0 : break;
589 : }
590 0 : case GST_ITERATOR_RESYNC:
591 0 : m_gstWrapper->gstIteratorResync(it);
592 0 : break;
593 1 : case GST_ITERATOR_ERROR:
594 : case GST_ITERATOR_DONE:
595 1 : done = TRUE;
596 1 : break;
597 : }
598 : }
599 :
600 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
601 :
602 1 : m_glibWrapper->gValueUnset(&item);
603 1 : m_gstWrapper->gstIteratorFree(it);
604 :
605 1 : return nullptr;
606 : }
607 :
608 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
609 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
610 : {
611 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
612 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
613 5 : if (kSource)
614 : {
615 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
616 : audioAttributes =
617 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
618 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
619 : 0, // used only in one of logs in rdk_gstreamer_utils, no
620 : // need to set this param.
621 : 0, // used only in one of logs in rdk_gstreamer_utils, no
622 : // need to set this param.
623 4 : audioConfig.codecSpecificConfig.data(),
624 : static_cast<std::uint32_t>(
625 4 : audioConfig.codecSpecificConfig.size())};
626 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
627 : {
628 2 : audioAttributes->m_codecParam = "mp4a";
629 : }
630 2 : else if (source->getMimeType() == "audio/x-eac3")
631 : {
632 1 : audioAttributes->m_codecParam = "ec-3";
633 : }
634 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
635 : {
636 1 : audioAttributes->m_codecParam = "lpcm";
637 : }
638 4 : }
639 : else
640 : {
641 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
642 : }
643 :
644 5 : return audioAttributes;
645 : }
646 :
647 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
648 : {
649 1 : if (!m_workerThread)
650 0 : return false;
651 :
652 2 : m_workerThread->enqueueTask(
653 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
654 1 : return true;
655 : }
656 :
657 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
658 : {
659 5 : bool returnValue{false};
660 5 : GstElement *sink{getSink(mediaSourceType)};
661 5 : if (sink)
662 : {
663 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
664 : {
665 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
666 2 : returnValue = true;
667 : }
668 : else
669 : {
670 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
671 : }
672 3 : m_gstWrapper->gstObjectUnref(sink);
673 : }
674 : else
675 : {
676 2 : RIALTO_SERVER_LOG_ERROR("Failed to get immediate-output property, sink is NULL");
677 : }
678 :
679 5 : return returnValue;
680 : }
681 :
682 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
683 : {
684 5 : bool returnValue{false};
685 5 : GstElement *sink{getSink(mediaSourceType)};
686 5 : if (sink)
687 : {
688 3 : GstStructure *stats{nullptr};
689 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
690 3 : if (!stats)
691 : {
692 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
693 : }
694 : else
695 : {
696 : guint64 renderedFramesTmp;
697 : guint64 droppedFramesTmp;
698 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
699 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
700 : {
701 1 : renderedFrames = renderedFramesTmp;
702 1 : droppedFrames = droppedFramesTmp;
703 1 : returnValue = true;
704 : }
705 : else
706 : {
707 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
708 : GST_ELEMENT_NAME(sink));
709 : }
710 2 : m_gstWrapper->gstStructureFree(stats);
711 : }
712 3 : m_gstWrapper->gstObjectUnref(sink);
713 : }
714 : else
715 : {
716 2 : RIALTO_SERVER_LOG_ERROR("Failed to get stats, sink is NULL");
717 : }
718 :
719 5 : return returnValue;
720 : }
721 :
722 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
723 : {
724 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
725 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
726 :
727 4 : if (mediaSegment.isEncrypted())
728 : {
729 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
730 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
731 :
732 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
733 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
734 3 : mediaSegment.getInitVector().size());
735 3 : GstBuffer *subsamples{nullptr};
736 3 : if (!mediaSegment.getSubSamples().empty())
737 : {
738 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
739 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
740 : GstByteWriter writer;
741 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
742 :
743 6 : for (const auto &subSample : mediaSegment.getSubSamples())
744 : {
745 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
746 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
747 : }
748 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
749 : }
750 :
751 3 : uint32_t crypt = 0;
752 3 : uint32_t skip = 0;
753 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
754 :
755 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
756 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
757 3 : mediaSegment.getInitWithLast15(),
758 : keyId,
759 : initVector,
760 : subsamples,
761 6 : mediaSegment.getCipherMode(),
762 : crypt,
763 : skip,
764 : encryptionPatternSet,
765 6 : m_context.decryptionService};
766 :
767 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
768 : {
769 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
770 1 : if (keyId)
771 : {
772 1 : m_gstWrapper->gstBufferUnref(keyId);
773 : }
774 1 : if (initVector)
775 : {
776 1 : m_gstWrapper->gstBufferUnref(initVector);
777 : }
778 1 : if (subsamples)
779 : {
780 1 : m_gstWrapper->gstBufferUnref(subsamples);
781 : }
782 : }
783 : }
784 :
785 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
786 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
787 4 : return gstBuffer;
788 : }
789 :
790 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
791 : {
792 4 : auto elem = m_context.streamInfo.find(mediaSource);
793 4 : if (elem != m_context.streamInfo.end())
794 : {
795 2 : StreamInfo &streamInfo = elem->second;
796 2 : streamInfo.isNeedDataPending = false;
797 :
798 : // Send new NeedMediaData if we still need it
799 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
800 : {
801 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
802 : }
803 : }
804 : else
805 : {
806 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
807 : }
808 4 : }
809 :
810 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
811 : {
812 19 : auto elem = m_context.streamInfo.find(mediaType);
813 19 : if (elem != m_context.streamInfo.end())
814 : {
815 16 : StreamInfo &streamInfo = elem->second;
816 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
817 : {
818 2 : return;
819 : }
820 :
821 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
822 : {
823 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
824 : }
825 : else
826 : {
827 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
828 : }
829 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
830 : {
831 : // This needs to be done before gstAppSrcPushBuffer() is
832 : // called because it can free the memory
833 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
834 : }
835 :
836 28 : for (GstBuffer *buffer : streamInfo.buffers)
837 : {
838 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
839 : }
840 14 : streamInfo.buffers.clear();
841 14 : streamInfo.isDataPushed = true;
842 :
843 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
844 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
845 15 : [](const auto &entry) { return entry.second.isDataPushed; });
846 :
847 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
848 : {
849 1 : m_context.bufferedNotificationSent = true;
850 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
851 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
852 : }
853 14 : cancelUnderflow(mediaType);
854 :
855 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
856 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
857 : {
858 0 : setEos(mediaType);
859 : }
860 : }
861 : }
862 :
863 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
864 : {
865 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
866 7 : if (elem != m_context.streamInfo.end())
867 : {
868 6 : StreamInfo &streamInfo = elem->second;
869 :
870 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
871 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
872 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
873 :
874 6 : if (rate != kInvalidRate)
875 : {
876 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
877 : }
878 :
879 6 : if (channels != kInvalidChannels)
880 : {
881 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
882 : }
883 :
884 6 : setCodecData(newCaps, codecData);
885 :
886 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
887 : {
888 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
889 : }
890 :
891 6 : m_gstWrapper->gstCapsUnref(newCaps);
892 6 : m_gstWrapper->gstCapsUnref(currentCaps);
893 : }
894 7 : }
895 :
896 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
897 : const std::shared_ptr<CodecData> &codecData)
898 : {
899 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
900 8 : if (elem != m_context.streamInfo.end())
901 : {
902 7 : StreamInfo &streamInfo = elem->second;
903 :
904 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
905 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
906 :
907 7 : if (width > 0)
908 : {
909 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
910 : }
911 :
912 7 : if (height > 0)
913 : {
914 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
915 : }
916 :
917 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
918 : {
919 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
920 : frameRate.denominator, NULL);
921 : }
922 :
923 7 : setCodecData(newCaps, codecData);
924 :
925 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
926 : {
927 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
928 : }
929 :
930 7 : m_gstWrapper->gstCapsUnref(currentCaps);
931 7 : m_gstWrapper->gstCapsUnref(newCaps);
932 : }
933 8 : }
934 :
935 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
936 : {
937 5 : if (clippingStart || clippingEnd)
938 : {
939 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
940 : {
941 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
942 : clippingStart, clippingEnd);
943 : }
944 : else
945 : {
946 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
947 : buffer, clippingStart, clippingEnd);
948 : }
949 : }
950 5 : }
951 :
952 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
953 : {
954 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
955 : {
956 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
957 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
958 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
959 7 : m_gstWrapper->gstBufferUnref(buf);
960 7 : return true;
961 : }
962 6 : if (codecData && CodecDataType::STRING == codecData->type)
963 : {
964 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
965 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
966 2 : return true;
967 : }
968 4 : return false;
969 : }
970 :
971 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
972 : {
973 12 : auto initialPosition = m_context.initialPositions.find(source);
974 12 : if (m_context.initialPositions.end() == initialPosition)
975 : {
976 : // Sending initial sample not needed
977 7 : return;
978 : }
979 : // GstAppSrc does not replace a segment if it is the same as the previous one.
980 : // That causes problems with position reporting on Amlogic devices, so we need to push
981 : // two segments with different reset_time values.
982 5 : pushAdditionalSegmentIfRequired(source);
983 :
984 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
985 : {
986 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
987 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
988 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
989 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
990 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
991 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
992 : {
993 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
994 1 : m_gstWrapper->gstSegmentFree(segment);
995 1 : m_context.initialPositions.erase(initialPosition);
996 1 : return;
997 : }
998 5 : segment->applied_rate = appliedRate;
999 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
1000 : "], rate: %f, appliedRate %f, reset_time: %d\n",
1001 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
1002 : segment->rate, segment->applied_rate, resetTime);
1003 :
1004 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
1005 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
1006 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (and that causes problems with EME
1007 : // for the first frame).
1008 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
1009 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
1010 5 : m_gstWrapper->gstSampleUnref(sample);
1011 5 : m_gstWrapper->gstCapsUnref(currentCaps);
1012 :
1013 5 : m_gstWrapper->gstSegmentFree(segment);
1014 : }
1015 4 : m_context.currentPosition[source] = initialPosition->second.back();
1016 4 : m_context.initialPositions.erase(initialPosition);
1017 4 : return;
1018 : }
1019 :
1020 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
1021 : {
1022 5 : auto currentPosition = m_context.currentPosition.find(source);
1023 5 : if (m_context.currentPosition.end() == currentPosition)
1024 : {
1025 4 : return;
1026 : }
1027 1 : auto initialPosition = m_context.initialPositions.find(source);
1028 1 : if (m_context.initialPositions.end() == initialPosition)
1029 : {
1030 0 : return;
1031 : }
1032 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1033 1 : currentPosition->second == initialPosition->second.back())
1034 : {
1035 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1036 1 : SegmentData additionalSegment = initialPosition->second.back();
1037 1 : additionalSegment.resetTime = false;
1038 1 : initialPosition->second.push_back(additionalSegment);
1039 : }
1040 : }
1041 :
1042 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1043 : {
1044 2 : auto initialPosition = m_context.initialPositions.find(source);
1045 2 : if (m_context.initialPositions.end() == initialPosition)
1046 : {
1047 : // Sending initial sample not needed
1048 1 : return;
1049 : }
1050 :
1051 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1052 : GST_TIME_ARGS(initialPosition->second.back().position));
1053 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1054 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1055 :
1056 1 : m_context.initialPositions.erase(initialPosition);
1057 : }
1058 :
1059 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1060 : {
1061 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1062 : {
1063 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1064 1 : return false;
1065 : }
1066 6 : if (source->getMimeType().empty())
1067 : {
1068 1 : RIALTO_SERVER_LOG_WARN("Skipping audio source switch. Unknown mime type");
1069 1 : return false;
1070 : }
1071 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1072 5 : if (!audioAttributes)
1073 : {
1074 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1075 1 : return false;
1076 : }
1077 :
1078 4 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1079 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1080 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1081 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1082 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1083 : {
1084 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1085 3 : int sampleAttributes{
1086 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks that this param != NULL.
1087 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1088 3 : unsigned int ui32Delay{0}; // output param
1089 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1090 : // currentDispPts in rdk_gstreamer_utils function stub
1091 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1092 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1093 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1094 3 : m_glibWrapper->gFree(oldCapsCStr);
1095 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1096 3 : bool svpEnabled{true}; // assume always true
1097 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1098 : bool result =
1099 3 : m_rdkGstreamerUtilsWrapper
1100 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1101 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1102 : &audioChangeStage,
1103 : &caps, // may fail for amlogic - that implementation changes
1104 : // this parameter, it's probably used by Netflix later
1105 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1106 :
1107 3 : if (!result || !retVal)
1108 : {
1109 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1110 : }
1111 : }
1112 : else
1113 : {
1114 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1115 : }
1116 :
1117 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1118 4 : if (caps)
1119 4 : m_gstWrapper->gstCapsUnref(caps);
1120 4 : if (oldCaps)
1121 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1122 :
1123 4 : return true;
1124 5 : }
1125 :
1126 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1127 : {
1128 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1129 : }
1130 :
1131 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1132 : {
1133 88 : if (m_workerThread)
1134 : {
1135 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1136 : }
1137 : }
1138 :
1139 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1140 : {
1141 1 : if (m_workerThread)
1142 : {
1143 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1144 : }
1145 : }
1146 :
1147 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1148 : {
1149 3 : if (m_workerThread)
1150 : {
1151 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1152 6 : m_workerThread->enqueueTask(
1153 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1154 : }
1155 3 : }
1156 :
1157 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1158 : {
1159 2 : if (m_workerThread)
1160 : {
1161 2 : bool underflowEnabled = m_context.isPlaying;
1162 4 : m_workerThread->enqueueTask(
1163 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1164 : }
1165 2 : }
1166 :
1167 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1168 : {
1169 1 : allSourcesAttached();
1170 : }
1171 :
1172 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1173 : {
1174 14 : auto elem = m_context.streamInfo.find(mediaSource);
1175 14 : if (elem != m_context.streamInfo.end())
1176 : {
1177 14 : StreamInfo &streamInfo = elem->second;
1178 14 : if (!streamInfo.underflowOccured)
1179 : {
1180 11 : return;
1181 : }
1182 :
1183 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1184 3 : streamInfo.underflowOccured = false;
1185 : }
1186 : }
1187 :
1188 3 : void GstGenericPlayer::play(bool &async)
1189 : {
1190 3 : if (0 == m_ongoingStateChangesNumber)
1191 : {
1192 : // Operation called on main thread, because PAUSED->PLAYING change is synchronous and needs to be done fast.
1193 : //
1194 : // m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1195 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1196 2 : ++m_ongoingStateChangesNumber;
1197 2 : async = (changePipelineState(GST_STATE_PLAYING) == GST_STATE_CHANGE_ASYNC);
1198 2 : RIALTO_SERVER_LOG_MIL("State change to PLAYING requested");
1199 : }
1200 : else
1201 : {
1202 1 : ++m_ongoingStateChangesNumber;
1203 1 : async = true;
1204 1 : if (m_workerThread)
1205 : {
1206 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1207 : }
1208 : }
1209 3 : }
1210 :
1211 2 : void GstGenericPlayer::pause()
1212 : {
1213 2 : ++m_ongoingStateChangesNumber;
1214 2 : if (m_workerThread)
1215 : {
1216 2 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1217 : }
1218 : }
1219 :
1220 1 : void GstGenericPlayer::stop()
1221 : {
1222 1 : ++m_ongoingStateChangesNumber;
1223 1 : if (m_workerThread)
1224 : {
1225 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1226 : }
1227 : }
1228 :
1229 6 : GstStateChangeReturn GstGenericPlayer::changePipelineState(GstState newState)
1230 : {
1231 6 : if (!m_context.pipeline)
1232 : {
1233 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1234 1 : if (m_gstPlayerClient)
1235 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1236 1 : --m_ongoingStateChangesNumber;
1237 1 : return GST_STATE_CHANGE_FAILURE;
1238 : }
1239 5 : const GstStateChangeReturn result{m_gstWrapper->gstElementSetState(m_context.pipeline, newState)};
1240 5 : if (result == GST_STATE_CHANGE_FAILURE)
1241 : {
1242 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1243 1 : if (m_gstPlayerClient)
1244 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1245 : }
1246 5 : --m_ongoingStateChangesNumber;
1247 5 : return result;
1248 : }
1249 :
1250 15 : int64_t GstGenericPlayer::getPosition(GstElement *element)
1251 : {
1252 15 : if (!element)
1253 : {
1254 1 : RIALTO_SERVER_LOG_WARN("Element is null");
1255 1 : return -1;
1256 : }
1257 :
1258 14 : m_gstWrapper->gstStateLock(element);
1259 :
1260 28 : if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
1261 14 : (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
1262 1 : m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
1263 : {
1264 1 : RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
1265 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
1266 : m_gstWrapper->gstElementStateChangeReturnGetName(
1267 : m_gstWrapper->gstElementGetStateReturn(element)),
1268 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));
1269 :
1270 1 : m_gstWrapper->gstStateUnlock(element);
1271 1 : return -1;
1272 : }
1273 13 : m_gstWrapper->gstStateUnlock(element);
1274 :
1275 13 : gint64 position = -1;
1276 13 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
1277 : {
1278 1 : RIALTO_SERVER_LOG_WARN("Failed to query position");
1279 1 : return -1;
1280 : }
1281 :
1282 12 : return position;
1283 : }
1284 :
1285 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1286 : {
1287 1 : if (m_workerThread)
1288 : {
1289 2 : m_workerThread->enqueueTask(
1290 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1291 : }
1292 1 : }
1293 :
1294 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1295 : {
1296 1 : if (m_workerThread)
1297 : {
1298 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1299 : }
1300 : }
1301 :
1302 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1303 : {
1304 4 : bool result = false;
1305 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1306 4 : if (videoSink)
1307 : {
1308 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1309 : {
1310 : std::string rect =
1311 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1312 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1313 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1314 2 : m_context.pendingGeometry.clear();
1315 2 : result = true;
1316 : }
1317 : else
1318 : {
1319 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1320 : }
1321 3 : m_gstWrapper->gstObjectUnref(videoSink);
1322 : }
1323 : else
1324 : {
1325 1 : RIALTO_SERVER_LOG_ERROR("Failed to set video rectangle, sink is NULL");
1326 : }
1327 :
1328 4 : return result;
1329 : }
1330 :
1331 3 : bool GstGenericPlayer::setImmediateOutput()
1332 : {
1333 3 : bool result{false};
1334 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1335 : {
1336 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1337 3 : if (sink)
1338 : {
1339 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1340 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1341 :
1342 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1343 : {
1344 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1345 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1346 1 : result = true;
1347 : }
1348 : else
1349 : {
1350 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1351 : }
1352 2 : m_context.pendingImmediateOutputForVideo.reset();
1353 2 : m_gstWrapper->gstObjectUnref(sink);
1354 : }
1355 : else
1356 : {
1357 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1358 : }
1359 : }
1360 3 : return result;
1361 : }
1362 :
1363 4 : bool GstGenericPlayer::setShowVideoWindow()
1364 : {
1365 4 : if (!m_context.pendingShowVideoWindow.has_value())
1366 : {
1367 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1368 1 : return false;
1369 : }
1370 :
1371 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1372 3 : if (!videoSink)
1373 : {
1374 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1375 1 : return false;
1376 : }
1377 2 : bool result{false};
1378 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1379 : {
1380 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1381 1 : result = true;
1382 : }
1383 : else
1384 : {
1385 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1386 : }
1387 2 : m_context.pendingShowVideoWindow.reset();
1388 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1389 2 : return result;
1390 : }
1391 :
1392 4 : bool GstGenericPlayer::setLowLatency()
1393 : {
1394 4 : bool result{false};
1395 4 : if (m_context.pendingLowLatency.has_value())
1396 : {
1397 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1398 4 : if (sink)
1399 : {
1400 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1401 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1402 :
1403 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1404 : {
1405 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1406 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1407 2 : result = true;
1408 : }
1409 : else
1410 : {
1411 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1412 : }
1413 3 : m_context.pendingLowLatency.reset();
1414 3 : m_gstWrapper->gstObjectUnref(sink);
1415 : }
1416 : else
1417 : {
1418 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1419 : }
1420 : }
1421 4 : return result;
1422 : }
1423 :
1424 3 : bool GstGenericPlayer::setSync()
1425 : {
1426 3 : bool result{false};
1427 3 : if (m_context.pendingSync.has_value())
1428 : {
1429 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1430 3 : if (sink)
1431 : {
1432 2 : bool sync{m_context.pendingSync.value()};
1433 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1434 :
1435 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1436 : {
1437 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1438 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1439 1 : result = true;
1440 : }
1441 : else
1442 : {
1443 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1444 : }
1445 2 : m_context.pendingSync.reset();
1446 2 : m_gstWrapper->gstObjectUnref(sink);
1447 : }
1448 : else
1449 : {
1450 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1451 : }
1452 : }
1453 3 : return result;
1454 : }
1455 :
1456 3 : bool GstGenericPlayer::setSyncOff()
1457 : {
1458 3 : bool result{false};
1459 3 : if (m_context.pendingSyncOff.has_value())
1460 : {
1461 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1462 3 : if (decoder)
1463 : {
1464 2 : bool syncOff{m_context.pendingSyncOff.value()};
1465 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1466 :
1467 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1468 : {
1469 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1470 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1471 1 : result = true;
1472 : }
1473 : else
1474 : {
1475 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1476 : }
1477 2 : m_context.pendingSyncOff.reset();
1478 2 : m_gstWrapper->gstObjectUnref(decoder);
1479 : }
1480 : else
1481 : {
1482 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1483 : }
1484 : }
1485 3 : return result;
1486 : }
1487 :
1488 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1489 : {
1490 6 : bool result{false};
1491 6 : int32_t streamSyncMode{0};
1492 : {
1493 6 : std::unique_lock lock{m_context.propertyMutex};
1494 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1495 : {
1496 0 : return false;
1497 : }
1498 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1499 : }
1500 6 : if (MediaSourceType::AUDIO == type)
1501 : {
1502 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1503 3 : if (!decoder)
1504 : {
1505 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1506 1 : return false;
1507 : }
1508 :
1509 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1510 :
1511 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1512 : {
1513 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1514 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1515 1 : result = true;
1516 : }
1517 : else
1518 : {
1519 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1520 : }
1521 2 : m_gstWrapper->gstObjectUnref(decoder);
1522 2 : std::unique_lock lock{m_context.propertyMutex};
1523 2 : m_context.pendingStreamSyncMode.erase(type);
1524 : }
1525 3 : else if (MediaSourceType::VIDEO == type)
1526 : {
1527 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1528 3 : if (!parser)
1529 : {
1530 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1531 1 : return false;
1532 : }
1533 :
1534 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1535 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1536 :
1537 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1538 : {
1539 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1540 1 : result = true;
1541 : }
1542 : else
1543 : {
1544 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1545 : }
1546 2 : m_gstWrapper->gstObjectUnref(parser);
1547 2 : std::unique_lock lock{m_context.propertyMutex};
1548 2 : m_context.pendingStreamSyncMode.erase(type);
1549 : }
1550 4 : return result;
1551 : }
1552 :
1553 3 : bool GstGenericPlayer::setRenderFrame()
1554 : {
1555 3 : bool result{false};
1556 3 : if (m_context.pendingRenderFrame)
1557 : {
1558 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1559 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1560 3 : if (sink)
1561 : {
1562 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1563 : {
1564 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1565 :
1566 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1567 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1568 : false));
1569 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1570 1 : result = true;
1571 : }
1572 : else
1573 : {
1574 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1575 : }
1576 2 : m_gstWrapper->gstObjectUnref(sink);
1577 2 : m_context.pendingRenderFrame = false;
1578 : }
1579 : else
1580 : {
1581 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1582 : }
1583 : }
1584 3 : return result;
1585 : }
1586 :
1587 3 : bool GstGenericPlayer::setBufferingLimit()
1588 : {
1589 3 : bool result{false};
1590 3 : guint bufferingLimit{0};
1591 : {
1592 3 : std::unique_lock lock{m_context.propertyMutex};
1593 3 : if (!m_context.pendingBufferingLimit.has_value())
1594 : {
1595 0 : return false;
1596 : }
1597 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1598 : }
1599 :
1600 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1601 3 : if (decoder)
1602 : {
1603 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1604 :
1605 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1606 : {
1607 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1608 1 : result = true;
1609 : }
1610 : else
1611 : {
1612 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1613 : GST_ELEMENT_NAME(decoder));
1614 : }
1615 2 : m_gstWrapper->gstObjectUnref(decoder);
1616 2 : std::unique_lock lock{m_context.propertyMutex};
1617 2 : m_context.pendingBufferingLimit.reset();
1618 : }
1619 : else
1620 : {
1621 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1622 : }
1623 3 : return result;
1624 : }
1625 :
1626 2 : bool GstGenericPlayer::setUseBuffering()
1627 : {
1628 2 : std::unique_lock lock{m_context.propertyMutex};
1629 2 : if (m_context.pendingUseBuffering.has_value())
1630 : {
1631 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1632 : {
1633 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1634 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1635 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1636 : useBufferingGboolean, nullptr);
1637 1 : m_context.pendingUseBuffering.reset();
1638 1 : return true;
1639 : }
1640 : else
1641 : {
1642 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1643 : }
1644 : }
1645 1 : return false;
1646 2 : }
1647 :
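The setters above (setSync, setSyncOff, setStreamSyncMode, setBufferingLimit, setUseBuffering) share one deferred-apply pattern: a requested value is cached in m_context until the relevant GStreamer element exists, then written once and cleared. The sketch below shows just that pattern in isolation; PendingProperty and applyPending are hypothetical names used for illustration, not Rialto types.

#include <gst/gst.h>

#include <optional>

// Hypothetical cache for a boolean property that may be requested before its element exists.
struct PendingProperty
{
    std::optional<gboolean> value;
};

// Apply the cached value once the element is available; returns true only when the value was written.
static bool applyPending(PendingProperty &pending, GstElement *element, const char *propertyName)
{
    if (!pending.value.has_value() || !element)
    {
        return false; // nothing queued, or the element has not been created yet
    }
    if (!g_object_class_find_property(G_OBJECT_GET_CLASS(element), propertyName))
    {
        return false; // the element does not expose this property
    }
    g_object_set(element, propertyName, pending.value.value(), nullptr);
    pending.value.reset(); // the queued value has been consumed
    return true;
}

In the member functions above, the same role is played by the optional and map members of m_context (pendingSync, pendingSyncOff, pendingStreamSyncMode, pendingBufferingLimit, pendingUseBuffering), with m_context.propertyMutex guarding pendingStreamSyncMode, pendingBufferingLimit and pendingUseBuffering as seen above.
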
1648 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1649 : {
1650 8 : bool result = false;
1651 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1652 8 : if (factory)
1653 : {
1654 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1655 7 : if (videoSink)
1656 : {
1657 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1658 : {
1659 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1660 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1661 4 : result = true;
1662 : }
1663 : else
1664 : {
1665 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1666 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1667 : }
1668 : }
1669 : else
1670 : {
1671 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1672 : }
1673 :
1674 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1675 : }
1676 : else
1677 : {
1678 : // No westeros sink
1679 1 : result = true;
1680 : }
1681 :
1682 8 : return result;
1683 : }
1684 :
1685 8 : bool GstGenericPlayer::setErmContext()
1686 : {
1687 8 : bool result = false;
1688 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1689 8 : if (context)
1690 : {
1691 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1692 6 : if (contextStructure)
1693 : {
1694 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1695 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1696 5 : result = true;
1697 : }
1698 : else
1699 : {
1700 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1701 : }
1702 6 : m_gstWrapper->gstContextUnref(context);
1703 : }
1704 : else
1705 : {
1706 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1707 : }
1708 :
1709 8 : return result;
1710 : }
1711 :
1712 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1713 : {
1714 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1715 : {
1716 1 : return;
1717 : }
1718 :
1719 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1720 : kPositionReportTimerMs,
1721 10 : [this]()
1722 : {
1723 1 : if (m_workerThread)
1724 : {
1725 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
1726 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1727 : }
1728 1 : },
1729 5 : firebolt::rialto::common::TimerType::PERIODIC);
1730 : }
1731 :
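startPositionReportingAndCheckAudioUnderflowTimer() above and its stop counterpart below rely on a periodic ITimer: once started it keeps invoking its callback every kPositionReportTimerMs until cancel() is called. As a rough, self-contained illustration of that contract only (not the Rialto ITimer implementation), a periodic timer could be sketched with std::thread as follows; SimplePeriodicTimer is an illustrative name.

#include <atomic>
#include <chrono>
#include <functional>
#include <thread>

// Illustrative periodic timer: runs `callback` every `interval` until cancel() is called.
class SimplePeriodicTimer
{
public:
    SimplePeriodicTimer(std::chrono::milliseconds interval, std::function<void()> callback)
        : m_active{true}, m_thread{[this, interval, callback]()
                                   {
                                       while (m_active)
                                       {
                                           std::this_thread::sleep_for(interval);
                                           if (m_active)
                                               callback();
                                       }
                                   }}
    {
    }
    ~SimplePeriodicTimer() { cancel(); }
    bool isActive() const { return m_active; }
    void cancel()
    {
        m_active = false;
        if (m_thread.joinable())
            m_thread.join();
    }

private:
    std::atomic<bool> m_active;
    std::thread m_thread;
};
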
1732 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1733 : {
1734 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1735 : {
1736 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1737 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1738 : }
1739 4 : }
1740 :
1741 6 : void GstGenericPlayer::startNotifyPlaybackInfoTimer()
1742 : {
1743 : static constexpr std::chrono::milliseconds kPlaybackInfoTimerMs{32};
1744 6 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
1745 : {
1746 1 : return;
1747 : }
1748 :
1749 5 : notifyPlaybackInfo();
1750 :
1751 : m_playbackInfoTimer =
1752 5 : m_timerFactory
1753 6 : ->createTimer(kPlaybackInfoTimerMs, [this]() { notifyPlaybackInfo(); }, firebolt::rialto::common::TimerType::PERIODIC);
1754 : }
1755 :
1756 3 : void GstGenericPlayer::stopNotifyPlaybackInfoTimer()
1757 : {
1758 3 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
1759 : {
1760 1 : m_playbackInfoTimer->cancel();
1761 1 : m_playbackInfoTimer.reset();
1762 : }
1763 3 : }
1764 :
1765 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
1766 : {
1767 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1768 : {
1769 0 : return;
1770 : }
1771 :
1772 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
1773 : kSubtitleClockResyncInterval,
1774 0 : [this]()
1775 : {
1776 0 : if (m_workerThread)
1777 : {
1778 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
1779 : }
1780 0 : },
1781 0 : firebolt::rialto::common::TimerType::PERIODIC);
1782 : }
1783 :
1784 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
1785 : {
1786 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1787 : {
1788 0 : m_subtitleClockResyncTimer->cancel();
1789 0 : m_subtitleClockResyncTimer.reset();
1790 : }
1791 : }
1792 :
1793 2 : void GstGenericPlayer::stopWorkerThread()
1794 : {
1795 2 : if (m_workerThread)
1796 : {
1797 2 : m_workerThread->stop();
1798 : }
1799 : }
1800 :
1801 0 : void GstGenericPlayer::setPendingPlaybackRate()
1802 : {
1803 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1804 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1805 : }
1806 :
1807 1 : void GstGenericPlayer::renderFrame()
1808 : {
1809 1 : if (m_workerThread)
1810 : {
1811 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1812 : }
1813 : }
1814 :
1815 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1816 : {
1817 18 : if (m_workerThread)
1818 : {
1819 36 : m_workerThread->enqueueTask(
1820 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1821 : }
1822 18 : }
1823 :
1824 9 : bool GstGenericPlayer::getVolume(double &currentVolume)
1825 : {
1826 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1827 : // GstGenericPlayer constructor and destructor, both of which also run on the main thread, so no race can occur here.
1828 9 : if (!m_context.pipeline)
1829 : {
1830 0 : return false;
1831 : }
1832 :
1833 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1834 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1835 : // The code has been written to be backward compatible on platforms that don't have this property.
1836 : // The observed behaviour was:
1837 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1838 : // to find volume in the pipeline works and is used.
1839 : // - if the returned fade volume is not negative then audio-fade is active. In this case the fade volume is
1840 : //   itself the current volume level (0=min to 100=max), and the pipeline's current volume level is
1841 : //   meaningless and does not contribute.
1842 9 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1843 11 : if (m_context.audioFadeEnabled && sink &&
1844 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1845 : {
1846 2 : gint fadeVolume{-100};
1847 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1848 2 : if (fadeVolume < 0)
1849 : {
1850 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1851 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1852 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1853 : }
1854 : else
1855 : {
1856 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1857 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1858 : }
1859 : }
1860 : else
1861 : {
1862 7 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1863 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1864 7 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1865 : }
1866 :
1867 9 : if (sink)
1868 2 : m_gstWrapper->gstObjectUnref(sink);
1869 :
1870 9 : return true;
1871 : }
1872 :
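Condensing the fade-volume rule documented in the NOTE above: when audio fade is enabled and the sink exposes "fade-volume", a non-negative reading already is the current volume (scaled 0..100); otherwise the pipeline's linear stream volume is used. A minimal, self-contained sketch of that interpretation only; effectiveVolume is an illustrative name, not part of the Rialto API.

#include <gst/audio/streamvolume.h>
#include <gst/gst.h>

// Returns the effective volume in the range [0.0, 1.0].
static double effectiveVolume(GstElement *pipeline, GstElement *audioSink, bool audioFadeEnabled)
{
    if (audioFadeEnabled && audioSink &&
        g_object_class_find_property(G_OBJECT_GET_CLASS(audioSink), "fade-volume"))
    {
        gint fadeVolume{-100};
        g_object_get(audioSink, "fade-volume", &fadeVolume, nullptr);
        if (fadeVolume >= 0)
        {
            // Fade is active: the sink reports the level directly (0 = min, 100 = max).
            return static_cast<double>(fadeVolume) / 100.0;
        }
    }
    // Fade inactive or property unsupported: fall back to the pipeline's stream volume.
    return gst_stream_volume_get_volume(GST_STREAM_VOLUME(pipeline), GST_STREAM_VOLUME_FORMAT_LINEAR);
}
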
1873 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1874 : {
1875 1 : if (m_workerThread)
1876 : {
1877 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1878 : }
1879 : }
1880 :
1881 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1882 : {
1883 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1884 : // GstGenericPlayer constructor and destructor, both of which also run on the main thread, so no race can occur here.
1885 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1886 : {
1887 2 : if (!m_context.subtitleSink)
1888 : {
1889 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1890 1 : return false;
1891 : }
1892 1 : gboolean muteValue{FALSE};
1893 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1894 1 : mute = muteValue;
1895 : }
1896 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1897 : {
1898 2 : if (!m_context.pipeline)
1899 : {
1900 1 : return false;
1901 : }
1902 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1903 : }
1904 : else
1905 : {
1906 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s is unsupported", common::convertMediaSourceType(mediaSourceType));
1907 1 : return false;
1908 : }
1909 :
1910 2 : return true;
1911 : }
1912 :
1913 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1914 : {
1915 1 : GstElement *sink = getSink(mediaSourceType);
1916 1 : if (!sink)
1917 : {
1918 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1919 0 : return true; // Our sinks are async by default
1920 : }
1921 1 : gboolean returnValue{TRUE};
1922 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1923 1 : m_gstWrapper->gstObjectUnref(sink);
1924 1 : return returnValue == TRUE;
1925 : }
1926 :
1927 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1928 : {
1929 1 : if (m_workerThread)
1930 : {
1931 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1932 : }
1933 : }
1934 :
1935 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1936 : {
1937 3 : if (!m_context.subtitleSink)
1938 : {
1939 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1940 1 : return false;
1941 : }
1942 :
1943 2 : gchar *identifier = nullptr;
1944 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1945 :
1946 2 : if (identifier)
1947 : {
1948 1 : textTrackIdentifier = identifier;
1949 1 : m_glibWrapper->gFree(identifier);
1950 1 : return true;
1951 : }
1952 : else
1953 : {
1954 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1955 1 : return false;
1956 : }
1957 : }
1958 :
1959 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1960 : {
1961 1 : if (m_workerThread)
1962 : {
1963 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1964 : }
1965 1 : return true;
1966 : }
1967 :
1968 1 : bool GstGenericPlayer::setSync(bool sync)
1969 : {
1970 1 : if (m_workerThread)
1971 : {
1972 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1973 : }
1974 1 : return true;
1975 : }
1976 :
1977 4 : bool GstGenericPlayer::getSync(bool &sync)
1978 : {
1979 4 : bool returnValue{false};
1980 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1981 4 : if (sink)
1982 : {
1983 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1984 : {
1985 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1986 1 : returnValue = true;
1987 : }
1988 : else
1989 : {
1990 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1991 : }
1992 2 : m_gstWrapper->gstObjectUnref(sink);
1993 : }
1994 2 : else if (m_context.pendingSync.has_value())
1995 : {
1996 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1997 1 : sync = m_context.pendingSync.value();
1998 1 : returnValue = true;
1999 : }
2000 : else
2001 : {
2002 : // We don't know the sink's default sync setting, so return failure here
2003 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached and no queued sync value");
2004 : }
2005 :
2006 4 : return returnValue;
2007 : }
2008 :
2009 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
2010 : {
2011 1 : if (m_workerThread)
2012 : {
2013 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
2014 : }
2015 1 : return true;
2016 : }
2017 :
2018 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
2019 : {
2020 1 : if (m_workerThread)
2021 : {
2022 2 : m_workerThread->enqueueTask(
2023 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
2024 : }
2025 1 : return true;
2026 : }
2027 :
2028 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
2029 : {
2030 5 : bool returnValue{false};
2031 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2032 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
2033 : {
2034 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
2035 2 : returnValue = true;
2036 : }
2037 : else
2038 : {
2039 3 : std::unique_lock lock{m_context.propertyMutex};
2040 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
2041 : {
2042 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2043 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
2044 1 : returnValue = true;
2045 : }
2046 : else
2047 : {
2048 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
2049 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2050 : }
2051 3 : }
2052 :
2053 5 : if (decoder)
2054 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2055 :
2056 5 : return returnValue;
2057 : }
2058 :
2059 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2060 : {
2061 1 : if (m_workerThread)
2062 : {
2063 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2064 : }
2065 : }
2066 :
2067 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2068 : {
2069 1 : if (m_workerThread)
2070 : {
2071 1 : async = isAsync(mediaSourceType);
2072 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
2073 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
2074 : }
2075 : }
2076 :
2077 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2078 : double appliedRate, uint64_t stopPosition)
2079 : {
2080 1 : if (m_workerThread)
2081 : {
2082 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2083 : resetTime, appliedRate, stopPosition));
2084 : }
2085 : }
2086 :
2087 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2088 : {
2089 0 : if (m_workerThread)
2090 : {
2091 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2092 : }
2093 : }
2094 :
2095 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2096 : {
2097 1 : if (m_workerThread)
2098 : {
2099 2 : m_workerThread->enqueueTask(
2100 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2101 : }
2102 1 : }
2103 :
2104 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2105 : {
2106 1 : if (m_workerThread)
2107 : {
2108 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2109 : }
2110 : }
2111 :
2112 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2113 : {
2114 5 : bool returnValue{false};
2115 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2116 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2117 : {
2118 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2119 2 : returnValue = true;
2120 : }
2121 : else
2122 : {
2123 3 : std::unique_lock lock{m_context.propertyMutex};
2124 3 : if (m_context.pendingBufferingLimit.has_value())
2125 : {
2126 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2127 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2128 1 : returnValue = true;
2129 : }
2130 : else
2131 : {
2132 2 : RIALTO_SERVER_LOG_ERROR("Buffering limit not supported in decoder '%s'",
2133 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2134 : }
2135 3 : }
2136 :
2137 5 : if (decoder)
2138 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2139 :
2140 5 : return returnValue;
2141 : }
2142 :
2143 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2144 : {
2145 1 : if (m_workerThread)
2146 : {
2147 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2148 : }
2149 : }
2150 :
2151 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2152 : {
2153 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2154 : {
2155 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2156 1 : return true;
2157 : }
2158 : else
2159 : {
2160 2 : std::unique_lock lock{m_context.propertyMutex};
2161 2 : if (m_context.pendingUseBuffering.has_value())
2162 : {
2163 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2164 1 : useBuffering = m_context.pendingUseBuffering.value();
2165 1 : return true;
2166 : }
2167 2 : }
2168 1 : return false;
2169 : }
2170 :
2171 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2172 : {
2173 1 : if (m_workerThread)
2174 : {
2175 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2176 : }
2177 : }
2178 :
2179 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2180 : {
2181 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2182 : }
2183 :
2184 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2185 : {
2186 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2187 : }
2188 :
2189 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2190 : {
2191 : // Only add children that are sinks
2192 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2193 : {
2194 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2195 :
2196 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2197 : {
2198 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is being overwritten");
2199 : }
2200 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2201 : }
2202 3 : }
2203 :
2204 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2205 : {
2206 : // Only add children that are sinks
2207 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2208 : {
2209 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2210 :
2211 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2212 : {
2213 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is being overwritten");
2214 : }
2215 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2216 : }
2217 3 : }
2218 :
2219 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2220 : {
2221 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2222 : {
2223 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2224 :
2225 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2226 : {
2227 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2228 1 : return;
2229 : }
2230 :
2231 2 : m_context.autoVideoChildSink = nullptr;
2232 : }
2233 : }
2234 :
2235 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2236 : {
2237 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2238 : {
2239 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2240 :
2241 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2242 : {
2243 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2244 1 : return;
2245 : }
2246 :
2247 2 : m_context.autoAudioChildSink = nullptr;
2248 : }
2249 : }
2250 :
2251 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2252 : {
2253 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2254 14 : if (!kTmpName)
2255 0 : return sink;
2256 :
2257 28 : const std::string kElementTypeName{kTmpName};
2258 14 : if (kElementTypeName == "GstAutoVideoSink")
2259 : {
2260 1 : if (!m_context.autoVideoChildSink)
2261 : {
2262 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2263 : }
2264 : else
2265 : {
2266 1 : return m_context.autoVideoChildSink;
2267 : }
2268 : }
2269 13 : return sink;
2270 14 : }
2271 :
2272 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2273 : {
2274 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2275 11 : if (!kTmpName)
2276 0 : return sink;
2277 :
2278 22 : const std::string kElementTypeName{kTmpName};
2279 11 : if (kElementTypeName == "GstAutoAudioSink")
2280 : {
2281 1 : if (!m_context.autoAudioChildSink)
2282 : {
2283 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2284 : }
2285 : else
2286 : {
2287 1 : return m_context.autoAudioChildSink;
2288 : }
2289 : }
2290 10 : return sink;
2291 11 : }
2292 :
2293 215 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2294 : {
2295 215 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2296 :
2297 215 : if (enableAudio)
2298 : {
2299 215 : flags |= getGstPlayFlag("audio");
2300 215 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2301 : }
2302 :
2303 215 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2304 : }
2305 :
2306 215 : bool GstGenericPlayer::shouldEnableNativeAudio()
2307 : {
2308 215 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2309 215 : if (factory)
2310 : {
2311 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2312 1 : return true;
2313 : }
2314 214 : return false;
2315 : }
2316 :
2317 : }; // namespace firebolt::rialto::server
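setPlaybinFlags() above composes playbin's "flags" bitmask from individual flag nicks through getGstPlayFlag() (declared in Utils.h, outside this file). Assuming that helper resolves a nick against the registered GstPlayFlags enum type, a typical standalone version of such a lookup is sketched below; lookupGstPlayFlag is an illustrative name and this is not the Rialto implementation.

#include <gst/gst.h>

// Resolve a playbin flag (e.g. "audio", "video", "native-video", "text") by its nick.
// Returns 0 when the GstPlayFlags type is not registered yet (playbin not loaded) or the nick is unknown.
static unsigned lookupGstPlayFlag(const char *nick)
{
    GType type = g_type_from_name("GstPlayFlags");
    if (type == G_TYPE_INVALID)
        return 0;

    GFlagsClass *flagsClass = static_cast<GFlagsClass *>(g_type_class_ref(type));
    GFlagsValue *flagValue = g_flags_get_value_by_nick(flagsClass, nick);
    unsigned flag = flagValue ? flagValue->value : 0;
    g_type_class_unref(flagsClass);
    return flag;
}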
|