Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "GstDispatcherThread.h"
25 : #include "GstGenericPlayer.h"
26 : #include "GstProtectionMetadata.h"
27 : #include "IGstTextTrackSinkFactory.h"
28 : #include "IMediaPipeline.h"
29 : #include "ITimer.h"
30 : #include "RialtoServerLogging.h"
31 : #include "TypeConverters.h"
32 : #include "Utils.h"
33 : #include "WorkerThread.h"
34 : #include "tasks/generic/GenericPlayerTaskFactory.h"
35 :
36 : namespace
37 : {
38 : /**
39 : * @brief Report position interval in ms.
40 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
41 : * whenever the session moves to another playback state.
42 : */
43 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
44 : } // namespace
45 :
46 : namespace firebolt::rialto::server
47 : {
48 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
49 :
50 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
51 : {
52 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
53 :
54 3 : if (!factory)
55 : {
56 : try
57 : {
58 3 : factory = std::make_shared<GstGenericPlayerFactory>();
59 : }
60 0 : catch (const std::exception &e)
61 : {
62 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
63 : }
64 :
65 3 : GstGenericPlayerFactory::m_factory = factory;
66 : }
67 :
68 3 : return factory;
69 : }
70 :
71 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
72 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
73 : const VideoRequirements &videoRequirements,
74 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
75 : {
76 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
77 :
78 : try
79 : {
80 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
81 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
82 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
83 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
84 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
85 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
86 : {
87 0 : throw std::runtime_error("Cannot create GstWrapper");
88 : }
89 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
90 : {
91 0 : throw std::runtime_error("Cannot create GlibWrapper");
92 : }
93 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
94 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
95 : {
96 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
97 : }
98 : gstPlayer = std::make_unique<
99 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
100 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), IGstSrcFactory::getFactory(),
101 2 : common::ITimerFactory::getFactory(),
102 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
103 : rdkGstreamerUtilsWrapper,
104 2 : IGstTextTrackSinkFactory::createFactory()),
105 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
106 3 : IGstProtectionMetadataHelperFactory::createFactory());
107 1 : }
108 0 : catch (const std::exception &e)
109 : {
110 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
111 : }
112 :
113 1 : return gstPlayer;
114 : }
115 :
116 204 : GstGenericPlayer::GstGenericPlayer(
117 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
118 : const VideoRequirements &videoRequirements,
119 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
120 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
121 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
122 : const IGstInitialiser &gstInitialiser, const std::shared_ptr<IGstSrcFactory> &gstSrcFactory,
123 : std::shared_ptr<common::ITimerFactory> timerFactory, std::unique_ptr<IGenericPlayerTaskFactory> taskFactory,
124 : std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
125 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
126 204 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
127 204 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
128 612 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory}, m_taskFactory{
129 816 : std::move(taskFactory)}
130 : {
131 204 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
132 :
133 204 : gstInitialiser.waitForInitialisation();
134 :
135 204 : m_context.decryptionService = &decryptionService;
136 :
137 204 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
138 : {
139 2 : throw std::runtime_error("Cannot create GstSrc");
140 : }
141 :
142 202 : if (!timerFactory)
143 : {
144 1 : throw std::runtime_error("TimerFactory is invalid");
145 : }
146 :
147 402 : if ((!gstProtectionMetadataFactory) ||
148 402 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
149 : {
150 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
151 : }
152 :
153 : // Ensure that rialtosrc has been initialised
154 201 : m_context.gstSrc->initSrc();
155 :
156 : // Start task thread
157 201 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
158 : {
159 0 : throw std::runtime_error("Failed to create the worker thread");
160 : }
161 :
162 : // Initialise pipeline
163 201 : switch (type)
164 : {
165 200 : case MediaType::MSE:
166 : {
167 200 : initMsePipeline();
168 200 : break;
169 : }
170 1 : default:
171 : {
172 1 : resetWorkerThread();
173 1 : throw std::runtime_error("Media type not supported");
174 : }
175 : }
176 :
177 : // Check the video requirements for a limited video.
178 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
179 : // video in a dual video scenario.
180 200 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
181 : {
182 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
183 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
184 8 : bool ermContextResult = setErmContext();
185 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
186 : {
187 1 : resetWorkerThread();
188 1 : termPipeline();
189 1 : throw std::runtime_error("Could not set secondary video");
190 : }
191 7 : }
192 : else
193 : {
194 192 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
195 : }
196 :
197 : m_gstDispatcherThread =
198 199 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
199 269 : }
200 :
201 398 : GstGenericPlayer::~GstGenericPlayer()
202 : {
203 199 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
204 :
205 199 : m_gstDispatcherThread.reset();
206 :
207 199 : resetWorkerThread();
208 :
209 199 : termPipeline();
210 398 : }
211 :
212 200 : void GstGenericPlayer::initMsePipeline()
213 : {
214 : // Make playbin
215 200 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
216 : // Set pipeline flags
217 200 : setPlaybinFlags(true);
218 :
219 : // Set callbacks
220 200 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
221 200 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
222 200 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
223 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
224 :
225 : // Set uri
226 200 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
227 :
228 : // Check playsink
229 200 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
230 200 : if (playsink)
231 : {
232 199 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
233 199 : m_gstWrapper->gstObjectUnref(playsink);
234 : }
235 : else
236 : {
237 1 : GST_WARNING("No playsink ?!?!?");
238 : }
239 200 : }
240 :
241 201 : void GstGenericPlayer::resetWorkerThread()
242 : {
243 : // Shutdown task thread
244 201 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
245 201 : m_workerThread->join();
246 201 : m_workerThread.reset();
247 : }
248 :
249 200 : void GstGenericPlayer::termPipeline()
250 : {
251 200 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
252 : {
253 0 : m_finishSourceSetupTimer->cancel();
254 : }
255 :
256 200 : m_finishSourceSetupTimer.reset();
257 :
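// Buffers still queued at teardown were never handed to an appsrc, so this object still owns
// them and unrefs each one before clearing the per-stream vectors.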
258 247 : for (auto &elem : m_context.streamInfo)
259 : {
260 47 : StreamInfo &streamInfo = elem.second;
261 49 : for (auto &buffer : streamInfo.buffers)
262 : {
263 2 : m_gstWrapper->gstBufferUnref(buffer);
264 : }
265 :
266 47 : streamInfo.buffers.clear();
267 : }
268 :
269 200 : m_taskFactory->createStop(m_context, *this)->execute();
270 200 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
271 200 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
272 200 : m_gstWrapper->gstObjectUnref(bus);
273 :
274 200 : if (m_context.source)
275 : {
276 1 : m_gstWrapper->gstObjectUnref(m_context.source);
277 : }
278 200 : if (m_context.subtitleSink)
279 : {
280 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
281 : }
282 :
283 : // Delete the pipeline
284 200 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
285 : }
286 :
287 801 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
288 : {
289 : GFlagsClass *flagsClass =
290 801 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
291 801 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
292 801 : return flag ? flag->value : 0;
293 : }
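// Illustrative usage (an assumption for clarity - setPlaybinFlags() itself is not part of this
// listing): getGstPlayFlag() resolves a GstPlayFlags nick such as "audio", "video" or "text" to
// its bitmask value, so flags can be OR-ed together before being written to playbin, e.g.:
//   unsigned flags = getGstPlayFlag("audio") | getGstPlayFlag("video") | getGstPlayFlag("text");
//   m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);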
294 :
295 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
296 : {
297 1 : self->m_gstWrapper->gstObjectRef(source);
298 1 : if (self->m_workerThread)
299 : {
300 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
301 : }
302 : }
303 :
304 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
305 : {
306 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
307 1 : self->m_gstWrapper->gstObjectRef(element);
308 1 : if (self->m_workerThread)
309 : {
310 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
311 : }
312 : }
313 :
314 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
315 : {
316 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
317 1 : if (self->m_workerThread)
318 : {
319 2 : self->m_workerThread->enqueueTask(
320 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
321 : }
322 1 : }
323 :
324 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
325 : {
326 1 : if (m_workerThread)
327 : {
328 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
329 : }
330 : }
331 :
332 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
333 : {
334 1 : if (m_workerThread)
335 : {
336 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
337 : }
338 : }
339 :
340 2 : void GstGenericPlayer::allSourcesAttached()
341 : {
342 2 : if (m_workerThread)
343 : {
344 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
345 : }
346 : }
347 :
348 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
349 : {
350 1 : if (m_workerThread)
351 : {
352 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
353 : }
354 : }
355 :
356 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
357 : {
358 1 : if (m_workerThread)
359 : {
360 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
361 : }
362 : }
363 :
364 1 : void GstGenericPlayer::setPosition(std::int64_t position)
365 : {
366 1 : if (m_workerThread)
367 : {
368 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
369 : }
370 : }
371 :
372 1 : void GstGenericPlayer::setPlaybackRate(double rate)
373 : {
374 1 : if (m_workerThread)
375 : {
376 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
377 : }
378 : }
379 :
380 4 : bool GstGenericPlayer::getPosition(std::int64_t &position)
381 : {
382 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
383 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
384 4 : if (!m_context.pipeline || GST_STATE(m_context.pipeline) < GST_STATE_PAUSED)
385 : {
386 1 : RIALTO_SERVER_LOG_WARN("GetPosition failed. Pipeline is null or state < PAUSED");
387 1 : return false;
388 : }
389 3 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
390 : {
391 1 : return false;
392 : }
393 2 : return true;
394 : }
395 :
396 34 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
397 : {
398 34 : const char *kSinkName{nullptr};
399 34 : GstElement *sink{nullptr};
400 34 : switch (mediaSourceType)
401 : {
402 18 : case MediaSourceType::AUDIO:
403 18 : kSinkName = "audio-sink";
404 18 : break;
405 14 : case MediaSourceType::VIDEO:
406 14 : kSinkName = "video-sink";
407 14 : break;
408 2 : default:
409 2 : break;
410 : }
411 34 : if (!kSinkName)
412 : {
413 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
414 : }
415 : else
416 : {
417 32 : if (m_context.pipeline == nullptr)
418 : {
419 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
420 : }
421 : else
422 : {
423 32 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
424 : }
425 32 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
426 32 : if (sink)
427 : {
428 22 : GstElement *autoSink{sink};
429 22 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
430 11 : autoSink = getSinkChildIfAutoVideoSink(sink);
431 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
432 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
433 :
434 : // Is this an auto-sink?...
435 22 : if (autoSink != sink)
436 : {
437 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
438 :
439 : // increase the reference count of the auto sink
440 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
441 : }
442 : }
443 : else
444 : {
445 10 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
446 : }
447 : }
448 34 : return sink;
449 : }
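// Note: getSink() returns the element with an extra reference held (taken via gObjectGet or
// gstObjectRef above), so callers are expected to release it with gstObjectUnref() once done,
// as the callers later in this file do.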
450 :
451 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
452 : {
453 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
454 19 : GValue item = G_VALUE_INIT;
455 19 : gboolean done = FALSE;
456 :
457 28 : while (!done)
458 : {
459 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
460 : {
461 12 : case GST_ITERATOR_OK:
462 : {
463 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
464 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
465 :
466 12 : if (factory)
467 : {
468 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
469 12 : if (mediaSourceType == MediaSourceType::AUDIO)
470 : {
471 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
472 : }
473 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
474 : {
475 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
476 : }
477 :
478 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
479 : {
480 12 : m_glibWrapper->gValueUnset(&item);
481 12 : m_gstWrapper->gstIteratorFree(it);
482 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
483 : }
484 : }
485 :
486 0 : m_glibWrapper->gValueUnset(&item);
487 0 : break;
488 : }
489 2 : case GST_ITERATOR_RESYNC:
490 2 : m_gstWrapper->gstIteratorResync(it);
491 2 : break;
492 7 : case GST_ITERATOR_ERROR:
493 : case GST_ITERATOR_DONE:
494 7 : done = TRUE;
495 7 : break;
496 : }
497 : }
498 :
499 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
500 :
501 7 : m_glibWrapper->gValueUnset(&item);
502 7 : m_gstWrapper->gstIteratorFree(it);
503 :
504 7 : return nullptr;
505 : }
506 :
507 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
508 : {
509 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
510 3 : GValue item = G_VALUE_INIT;
511 3 : gboolean done = FALSE;
512 :
513 4 : while (!done)
514 : {
515 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
516 : {
517 2 : case GST_ITERATOR_OK:
518 : {
519 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
520 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
521 :
522 2 : if (factory)
523 : {
524 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
525 2 : if (mediaSourceType == MediaSourceType::AUDIO)
526 : {
527 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
528 : }
529 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
530 : {
531 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
532 : }
533 :
534 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
535 : {
536 2 : m_glibWrapper->gValueUnset(&item);
537 2 : m_gstWrapper->gstIteratorFree(it);
538 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
539 : }
540 : }
541 :
542 0 : m_glibWrapper->gValueUnset(&item);
543 0 : break;
544 : }
545 0 : case GST_ITERATOR_RESYNC:
546 0 : m_gstWrapper->gstIteratorResync(it);
547 0 : break;
548 1 : case GST_ITERATOR_ERROR:
549 : case GST_ITERATOR_DONE:
550 1 : done = TRUE;
551 1 : break;
552 : }
553 : }
554 :
555 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
556 :
557 1 : m_glibWrapper->gValueUnset(&item);
558 1 : m_gstWrapper->gstIteratorFree(it);
559 :
560 1 : return nullptr;
561 : }
562 :
563 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
564 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
565 : {
566 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
567 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
568 5 : if (kSource)
569 : {
570 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
571 : audioAttributes =
572 8 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
573 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
574 : 0, // used only in one of logs in rdk_gstreamer_utils, no
575 : // need to set this param.
576 : 0, // used only in one of logs in rdk_gstreamer_utils, no
577 : // need to set this param.
578 4 : audioConfig.codecSpecificConfig.data(),
579 : static_cast<std::uint32_t>(
580 4 : audioConfig.codecSpecificConfig.size())};
581 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
582 : {
583 2 : audioAttributes->m_codecParam = "mp4a";
584 : }
585 2 : else if (source->getMimeType() == "audio/x-eac3")
586 : {
587 1 : audioAttributes->m_codecParam = "ec-3";
588 : }
589 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
590 : {
591 1 : audioAttributes->m_codecParam = "lpcm";
592 : }
593 4 : }
594 : else
595 : {
596 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
597 : }
598 :
599 5 : return audioAttributes;
600 : }
601 :
602 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
603 : {
604 1 : if (!m_workerThread)
605 0 : return false;
606 :
607 2 : m_workerThread->enqueueTask(
608 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
609 1 : return true;
610 : }
611 :
612 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
613 : {
614 5 : bool returnValue{false};
615 5 : GstElement *sink{getSink(mediaSourceType)};
616 5 : if (sink)
617 : {
618 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
619 : {
620 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
621 2 : returnValue = true;
622 : }
623 : else
624 : {
625 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
626 : }
627 3 : m_gstWrapper->gstObjectUnref(sink);
628 : }
629 : else
630 : {
631 2 : RIALTO_SERVER_LOG_ERROR("Failed to get immediate-output property, sink is NULL");
632 : }
633 :
634 5 : return returnValue;
635 : }
636 :
637 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
638 : {
639 5 : bool returnValue{false};
640 5 : GstElement *sink{getSink(mediaSourceType)};
641 5 : if (sink)
642 : {
643 3 : GstStructure *stats{nullptr};
644 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
645 3 : if (!stats)
646 : {
647 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
648 : }
649 : else
650 : {
651 : guint64 renderedFramesTmp;
652 : guint64 droppedFramesTmp;
653 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
654 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
655 : {
656 1 : renderedFrames = renderedFramesTmp;
657 1 : droppedFrames = droppedFramesTmp;
658 1 : returnValue = true;
659 : }
660 : else
661 : {
662 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
663 : GST_ELEMENT_NAME(sink));
664 : }
665 2 : m_gstWrapper->gstStructureFree(stats);
666 : }
667 3 : m_gstWrapper->gstObjectUnref(sink);
668 : }
669 :
670 5 : return returnValue;
671 : }
672 :
673 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
674 : {
675 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
676 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
677 :
678 4 : if (mediaSegment.isEncrypted())
679 : {
680 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
681 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
682 :
683 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
684 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
685 3 : mediaSegment.getInitVector().size());
686 3 : GstBuffer *subsamples{nullptr};
687 3 : if (!mediaSegment.getSubSamples().empty())
688 : {
689 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
690 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
691 : GstByteWriter writer;
692 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
693 :
694 6 : for (const auto &subSample : mediaSegment.getSubSamples())
695 : {
696 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
697 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
698 : }
699 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
700 : }
701 :
702 3 : uint32_t crypt = 0;
703 3 : uint32_t skip = 0;
704 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
705 :
706 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
707 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
708 3 : mediaSegment.getInitWithLast15(),
709 : keyId,
710 : initVector,
711 : subsamples,
712 6 : mediaSegment.getCipherMode(),
713 : crypt,
714 : skip,
715 : encryptionPatternSet,
716 6 : m_context.decryptionService};
717 :
718 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
719 : {
720 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
721 1 : if (keyId)
722 : {
723 1 : m_gstWrapper->gstBufferUnref(keyId);
724 : }
725 1 : if (initVector)
726 : {
727 1 : m_gstWrapper->gstBufferUnref(initVector);
728 : }
729 1 : if (subsamples)
730 : {
731 1 : m_gstWrapper->gstBufferUnref(subsamples);
732 : }
733 : }
734 : }
735 :
736 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
737 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
738 4 : return gstBuffer;
739 : }
740 :
741 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
742 : {
743 4 : auto elem = m_context.streamInfo.find(mediaSource);
744 4 : if (elem != m_context.streamInfo.end())
745 : {
746 2 : StreamInfo &streamInfo = elem->second;
747 2 : streamInfo.isNeedDataPending = false;
748 :
749 : // Send new NeedMediaData if we still need it
750 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
751 : {
752 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
753 : }
754 : }
755 : else
756 : {
757 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
758 : }
759 4 : }
760 :
761 18 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
762 : {
763 18 : auto elem = m_context.streamInfo.find(mediaType);
764 18 : if (elem != m_context.streamInfo.end())
765 : {
766 15 : StreamInfo &streamInfo = elem->second;
767 15 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
768 : {
769 2 : return;
770 : }
771 :
772 13 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
773 : {
774 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
775 : }
776 : else
777 : {
778 11 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
779 : }
780 13 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
781 : {
782 : // This needs to be done before gstAppSrcPushBuffer() is
783 : // called because it can free the memory
784 6 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
785 : }
786 :
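// gstAppSrcPushBuffer() takes ownership of each pushed buffer, so the vector is cleared
// afterwards without unreffing (contrast with termPipeline(), where unpushed buffers are
// still owned and must be unreffed).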
787 26 : for (GstBuffer *buffer : streamInfo.buffers)
788 : {
789 13 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
790 : }
791 13 : streamInfo.buffers.clear();
792 13 : streamInfo.isDataPushed = true;
793 :
794 13 : const bool kIsSingle = m_context.streamInfo.size() == 1;
795 13 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
796 14 : [](const auto &entry) { return entry.second.isDataPushed; });
797 :
798 13 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
799 : {
800 1 : m_context.bufferedNotificationSent = true;
801 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
802 : }
803 13 : cancelUnderflow(mediaType);
804 :
805 13 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
806 13 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
807 : {
808 0 : setEos(mediaType);
809 : }
810 : }
811 : }
812 :
813 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
814 : {
815 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
816 7 : if (elem != m_context.streamInfo.end())
817 : {
818 6 : StreamInfo &streamInfo = elem->second;
819 :
820 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
821 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
822 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
823 :
824 6 : if (rate != kInvalidRate)
825 : {
826 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
827 : }
828 :
829 6 : if (channels != kInvalidChannels)
830 : {
831 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
832 : }
833 :
834 6 : setCodecData(newCaps, codecData);
835 :
836 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
837 : {
838 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
839 : }
840 :
841 6 : m_gstWrapper->gstCapsUnref(newCaps);
842 6 : m_gstWrapper->gstCapsUnref(currentCaps);
843 : }
844 7 : }
845 :
846 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
847 : const std::shared_ptr<CodecData> &codecData)
848 : {
849 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
850 8 : if (elem != m_context.streamInfo.end())
851 : {
852 7 : StreamInfo &streamInfo = elem->second;
853 :
854 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
855 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
856 :
857 7 : if (width > 0)
858 : {
859 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
860 : }
861 :
862 7 : if (height > 0)
863 : {
864 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
865 : }
866 :
867 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
868 : {
869 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
870 : frameRate.denominator, NULL);
871 : }
872 :
873 7 : setCodecData(newCaps, codecData);
874 :
875 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
876 : {
877 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
878 : }
879 :
880 7 : m_gstWrapper->gstCapsUnref(currentCaps);
881 7 : m_gstWrapper->gstCapsUnref(newCaps);
882 : }
883 8 : }
884 :
885 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
886 : {
887 5 : if (clippingStart || clippingEnd)
888 : {
889 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
890 : {
891 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
892 : clippingStart, clippingEnd);
893 : }
894 : else
895 : {
896 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
897 : buffer, clippingStart, clippingEnd);
898 : }
899 : }
900 5 : }
901 :
902 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
903 : {
904 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
905 : {
906 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
907 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
908 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
909 7 : m_gstWrapper->gstBufferUnref(buf);
910 7 : return true;
911 : }
912 6 : if (codecData && CodecDataType::STRING == codecData->type)
913 : {
914 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
915 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
916 2 : return true;
917 : }
918 4 : return false;
919 : }
920 :
921 11 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
922 : {
923 11 : auto initialPosition = m_context.initialPositions.find(source);
924 11 : if (m_context.initialPositions.end() == initialPosition)
925 : {
926 : // Sending initial sample not needed
927 7 : return;
928 : }
929 7 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
930 : {
931 4 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
932 4 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
933 4 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
934 4 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
935 4 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
936 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
937 : {
938 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
939 1 : m_gstWrapper->gstSegmentFree(segment);
940 1 : m_context.initialPositions.erase(initialPosition);
941 1 : return;
942 : }
943 3 : segment->applied_rate = appliedRate;
944 3 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
945 : "], rate: %f, appliedRate %f, reset_time: %d\n",
946 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
947 : segment->rate, segment->applied_rate, resetTime);
948 :
949 3 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
950 : // We can't pass buffer in GstSample, because implementation of gst_app_src_push_sample
951 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (that causes problems with EME
952 : // for first frame).
953 3 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
954 3 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
955 3 : m_gstWrapper->gstSampleUnref(sample);
956 3 : m_gstWrapper->gstCapsUnref(currentCaps);
957 :
958 3 : m_gstWrapper->gstSegmentFree(segment);
959 : }
960 3 : m_context.initialPositions.erase(initialPosition);
961 3 : return;
962 : }
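// Note: the GstSample pushed above carries only the caps and the freshly seeked segment (its
// buffer is nullptr); the actual media buffers for this source are pushed separately via
// gstAppSrcPushBuffer() in attachData().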
963 :
964 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
965 : {
966 2 : auto initialPosition = m_context.initialPositions.find(source);
967 2 : if (m_context.initialPositions.end() == initialPosition)
968 : {
969 : // Sending initial sample not needed
970 1 : return;
971 : }
972 :
973 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
974 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
975 :
976 1 : m_context.initialPositions.erase(initialPosition);
977 : }
978 :
979 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
980 : {
981 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
982 : {
983 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
984 1 : return false;
985 : }
986 6 : if (source->getMimeType().empty())
987 : {
988 1 : RIALTO_SERVER_LOG_WARN("Skipping audio source switch. Unknown mime type");
989 1 : return false;
990 : }
991 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
992 5 : if (!audioAttributes)
993 : {
994 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
995 1 : return false;
996 : }
997 : std::int64_t currentDispPts64b; // In netflix code it's currentDisplayPosition + offset
998 4 : m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &currentDispPts64b);
999 4 : long long currentDispPts = currentDispPts64b; // NOLINT(runtime/int)
1000 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1001 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1002 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1003 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1004 : {
1005 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1006 3 : int sampleAttributes{
1007 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
1008 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1009 3 : unsigned int ui32Delay{0}; // output param
1010 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1011 : // currentDispPts in rdk_gstreamer_utils function stub
1012 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1013 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1014 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1015 3 : m_glibWrapper->gFree(oldCapsCStr);
1016 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1017 3 : bool svpEnabled{true}; // assume always true
1018 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1019 : bool result =
1020 3 : m_rdkGstreamerUtilsWrapper
1021 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1022 : &status, &ui32Delay, &audioChangeTargetPts, &currentDispPts,
1023 : &audioChangeStage,
1024 : &caps, // may fail for amlogic - that implementation changes
1025 : // this parameter, it's probably used by Netflix later
1026 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1027 :
1028 3 : if (!result || !retVal)
1029 : {
1030 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1031 : }
1032 : }
1033 : else
1034 : {
1035 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1036 : }
1037 :
1038 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1039 4 : if (caps)
1040 4 : m_gstWrapper->gstCapsUnref(caps);
1041 4 : if (oldCaps)
1042 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1043 :
1044 4 : return true;
1045 5 : }
1046 :
1047 84 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1048 : {
1049 84 : if (m_workerThread)
1050 : {
1051 84 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1052 : }
1053 : }
1054 :
1055 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1056 : {
1057 1 : if (m_workerThread)
1058 : {
1059 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1060 : }
1061 : }
1062 :
1063 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1064 : {
1065 3 : if (m_workerThread)
1066 : {
1067 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1068 6 : m_workerThread->enqueueTask(
1069 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1070 : }
1071 3 : }
1072 :
1073 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1074 : {
1075 2 : if (m_workerThread)
1076 : {
1077 2 : bool underflowEnabled = m_context.isPlaying;
1078 4 : m_workerThread->enqueueTask(
1079 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1080 : }
1081 2 : }
1082 :
1083 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1084 : {
1085 1 : allSourcesAttached();
1086 : }
1087 :
1088 13 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1089 : {
1090 13 : auto elem = m_context.streamInfo.find(mediaSource);
1091 13 : if (elem != m_context.streamInfo.end())
1092 : {
1093 13 : StreamInfo &streamInfo = elem->second;
1094 13 : if (!streamInfo.underflowOccured)
1095 : {
1096 10 : return;
1097 : }
1098 :
1099 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1100 3 : streamInfo.underflowOccured = false;
1101 : }
1102 : }
1103 :
1104 1 : void GstGenericPlayer::play()
1105 : {
1106 1 : if (m_workerThread)
1107 : {
1108 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1109 : }
1110 : }
1111 :
1112 1 : void GstGenericPlayer::pause()
1113 : {
1114 1 : if (m_workerThread)
1115 : {
1116 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1117 : }
1118 : }
1119 :
1120 1 : void GstGenericPlayer::stop()
1121 : {
1122 1 : if (m_workerThread)
1123 : {
1124 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1125 : }
1126 : }
1127 :
1128 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1129 : {
1130 4 : if (!m_context.pipeline)
1131 : {
1132 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1133 1 : if (m_gstPlayerClient)
1134 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1135 1 : return false;
1136 : }
1137 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1138 : {
1139 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1140 1 : if (m_gstPlayerClient)
1141 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1142 1 : return false;
1143 : }
1144 2 : return true;
1145 : }
1146 :
1147 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1148 : {
1149 1 : if (m_workerThread)
1150 : {
1151 2 : m_workerThread->enqueueTask(
1152 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1153 : }
1154 1 : }
1155 :
1156 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1157 : {
1158 1 : if (m_workerThread)
1159 : {
1160 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1161 : }
1162 : }
1163 :
1164 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1165 : {
1166 4 : bool result = false;
1167 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1168 4 : if (videoSink)
1169 : {
1170 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1171 : {
1172 : std::string rect =
1173 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1174 8 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1175 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1176 2 : m_context.pendingGeometry.clear();
1177 2 : result = true;
1178 : }
1179 : else
1180 : {
1181 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1182 : }
1183 3 : m_gstWrapper->gstObjectUnref(videoSink);
1184 : }
1185 :
1186 4 : return result;
1187 : }
1188 :
1189 3 : bool GstGenericPlayer::setImmediateOutput()
1190 : {
1191 3 : bool result{false};
1192 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1193 : {
1194 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1195 3 : if (sink)
1196 : {
1197 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1198 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1199 :
1200 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1201 : {
1202 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1203 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1204 1 : result = true;
1205 : }
1206 : else
1207 : {
1208 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1209 : }
1210 2 : m_context.pendingImmediateOutputForVideo.reset();
1211 2 : m_gstWrapper->gstObjectUnref(sink);
1212 : }
1213 : else
1214 : {
1215 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1216 : }
1217 : }
1218 3 : return result;
1219 : }
1220 :
1221 4 : bool GstGenericPlayer::setLowLatency()
1222 : {
1223 4 : bool result{false};
1224 4 : if (m_context.pendingLowLatency.has_value())
1225 : {
1226 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1227 4 : if (sink)
1228 : {
1229 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1230 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1231 :
1232 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1233 : {
1234 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1235 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1236 2 : result = true;
1237 : }
1238 : else
1239 : {
1240 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1241 : }
1242 3 : m_context.pendingLowLatency.reset();
1243 3 : m_gstWrapper->gstObjectUnref(sink);
1244 : }
1245 : else
1246 : {
1247 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1248 : }
1249 : }
1250 4 : return result;
1251 : }
1252 :
1253 3 : bool GstGenericPlayer::setSync()
1254 : {
1255 3 : bool result{false};
1256 3 : if (m_context.pendingSync.has_value())
1257 : {
1258 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1259 3 : if (sink)
1260 : {
1261 2 : bool sync{m_context.pendingSync.value()};
1262 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1263 :
1264 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1265 : {
1266 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1267 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1268 1 : result = true;
1269 : }
1270 : else
1271 : {
1272 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1273 : }
1274 2 : m_context.pendingSync.reset();
1275 2 : m_gstWrapper->gstObjectUnref(sink);
1276 : }
1277 : else
1278 : {
1279 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1280 : }
1281 : }
1282 3 : return result;
1283 : }
1284 :
1285 3 : bool GstGenericPlayer::setSyncOff()
1286 : {
1287 3 : bool result{false};
1288 3 : if (m_context.pendingSyncOff.has_value())
1289 : {
1290 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1291 3 : if (decoder)
1292 : {
1293 2 : bool syncOff{m_context.pendingSyncOff.value()};
1294 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1295 :
1296 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1297 : {
1298 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1299 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1300 1 : result = true;
1301 : }
1302 : else
1303 : {
1304 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1305 : }
1306 2 : m_context.pendingSyncOff.reset();
1307 2 : m_gstWrapper->gstObjectUnref(decoder);
1308 : }
1309 : else
1310 : {
1311 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1312 : }
1313 : }
1314 3 : return result;
1315 : }
1316 :
1317 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1318 : {
1319 6 : bool result{false};
1320 6 : int32_t streamSyncMode{0};
1321 : {
1322 6 : std::unique_lock lock{m_context.propertyMutex};
1323 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1324 : {
1325 0 : return false;
1326 : }
1327 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1328 : }
1329 6 : if (MediaSourceType::AUDIO == type)
1330 : {
1331 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1332 3 : if (!decoder)
1333 : {
1334 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1335 1 : return false;
1336 : }
1337 :
1338 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1339 :
1340 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1341 : {
1342 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1343 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1344 1 : result = true;
1345 : }
1346 : else
1347 : {
1348 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1349 : }
1350 2 : m_gstWrapper->gstObjectUnref(decoder);
1351 2 : std::unique_lock lock{m_context.propertyMutex};
1352 2 : m_context.pendingStreamSyncMode.erase(type);
1353 : }
1354 3 : else if (MediaSourceType::VIDEO == type)
1355 : {
1356 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1357 3 : if (!parser)
1358 : {
1359 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1360 1 : return false;
1361 : }
1362 :
1363 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1364 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1365 :
1366 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1367 : {
1368 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1369 1 : result = true;
1370 : }
1371 : else
1372 : {
1373 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1374 : }
1375 2 : m_gstWrapper->gstObjectUnref(parser);
1376 2 : std::unique_lock lock{m_context.propertyMutex};
1377 2 : m_context.pendingStreamSyncMode.erase(type);
1378 : }
1379 4 : return result;
1380 : }
1381 :
1382 3 : bool GstGenericPlayer::setRenderFrame()
1383 : {
1384 3 : bool result{false};
1385 3 : if (m_context.pendingRenderFrame)
1386 : {
1387 3 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1388 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1389 3 : if (sink)
1390 : {
1391 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1392 : {
1393 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1394 :
1395 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1396 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1397 : false));
1398 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1399 1 : result = true;
1400 : }
1401 : else
1402 : {
1403 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1404 : }
1405 2 : m_gstWrapper->gstObjectUnref(sink);
1406 2 : m_context.pendingRenderFrame = false;
1407 : }
1408 : else
1409 : {
1410 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1411 : }
1412 : }
1413 3 : return result;
1414 : }
1415 :
1416 3 : bool GstGenericPlayer::setBufferingLimit()
1417 : {
1418 3 : bool result{false};
1419 3 : guint bufferingLimit{0};
1420 : {
1421 3 : std::unique_lock lock{m_context.propertyMutex};
1422 3 : if (!m_context.pendingBufferingLimit.has_value())
1423 : {
1424 0 : return false;
1425 : }
1426 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1427 : }
1428 :
1429 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1430 3 : if (decoder)
1431 : {
1432 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1433 :
1434 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1435 : {
1436 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1437 1 : result = true;
1438 : }
1439 : else
1440 : {
1441 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1442 : GST_ELEMENT_NAME(decoder));
1443 : }
1444 2 : m_gstWrapper->gstObjectUnref(decoder);
1445 2 : std::unique_lock lock{m_context.propertyMutex};
1446 2 : m_context.pendingBufferingLimit.reset();
1447 : }
1448 : else
1449 : {
1450 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1451 : }
1452 3 : return result;
1453 : }
1454 :
1455 2 : bool GstGenericPlayer::setUseBuffering()
1456 : {
1457 2 : std::unique_lock lock{m_context.propertyMutex};
1458 2 : if (m_context.pendingUseBuffering.has_value())
1459 : {
1460 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1461 : {
1462 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1463 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1464 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1465 : useBufferingGboolean, nullptr);
1466 1 : m_context.pendingUseBuffering.reset();
1467 1 : return true;
1468 : }
1469 : else
1470 : {
1471 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1472 : }
1473 : }
1474 1 : return false;
1475 2 : }
1476 :
1477 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1478 : {
1479 8 : bool result = false;
1480 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1481 8 : if (factory)
1482 : {
1483 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1484 7 : if (videoSink)
1485 : {
1486 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1487 : {
1488 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1489 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1490 4 : result = true;
1491 : }
1492 : else
1493 : {
1494 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1495 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1496 : }
1497 : }
1498 : else
1499 : {
1500 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1501 : }
1502 :
1503 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1504 : }
1505 : else
1506 : {
1507 : // No westeros sink
1508 1 : result = true;
1509 : }
1510 :
1511 8 : return result;
1512 : }
1513 :
1514 8 : bool GstGenericPlayer::setErmContext()
1515 : {
1516 8 : bool result = false;
1517 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1518 8 : if (context)
1519 : {
1520 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1521 6 : if (contextStructure)
1522 : {
1523 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1524 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1525 5 : result = true;
1526 : }
1527 : else
1528 : {
1529 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1530 : }
1531 6 : m_gstWrapper->gstContextUnref(context);
1532 : }
1533 : else
1534 : {
1535 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1536 : }
1537 :
1538 8 : return result;
1539 : }
1540 :
1541 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1542 : {
1543 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1544 : {
1545 1 : return;
1546 : }
1547 :
1548 10 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1549 : kPositionReportTimerMs,
1550 7 : [this]()
1551 : {
1552 1 : if (m_workerThread)
1553 : {
1554 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context));
1555 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1556 : }
1557 1 : },
1558 5 : firebolt::rialto::common::TimerType::PERIODIC);
1559 : }
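// Usage note (assumed from the function names, not from call sites shown here): this timer is started
// when playback enters PLAYING and cancelled by the counterpart below when it leaves that state. Each
// kPositionReportTimerMs tick only enqueues work, so the pipeline is still touched exclusively from the
// worker thread:
//
//     startPositionReportingAndCheckAudioUnderflowTimer();   // on entering PLAYING
//     stopPositionReportingAndCheckAudioUnderflowTimer();    // on leaving PLAYING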
1560 :
1561 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1562 : {
1563 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1564 : {
1565 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1566 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1567 : }
1568 4 : }
1569 :
1570 2 : void GstGenericPlayer::stopWorkerThread()
1571 : {
1572 2 : if (m_workerThread)
1573 : {
1574 2 : m_workerThread->stop();
1575 : }
1576 : }
1577 :
1578 0 : void GstGenericPlayer::setPendingPlaybackRate()
1579 : {
1580 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1581 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1582 : }
1583 :
1584 1 : void GstGenericPlayer::renderFrame()
1585 : {
1586 1 : if (m_workerThread)
1587 : {
1588 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1589 : }
1590 : }
1591 :
1592 16 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1593 : {
1594 16 : if (m_workerThread)
1595 : {
1596 32 : m_workerThread->enqueueTask(
1597 32 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1598 : }
1599 16 : }
1600 :
1601 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1602 : {
1603 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1604 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1605 3 : if (!m_context.pipeline)
1606 : {
1607 0 : return false;
1608 : }
1609 :
1610 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1611 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1612 : // The code has been written to be backward compatible with platforms that don't have this property.
1613 : // The observed behaviour was:
1614 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1615 : // to find volume in the pipeline works and is used.
1616 : // - if the returned fade volume is zero or positive then audio-fade is active. In this case the fade volume
1617 : // directly gives the current volume level (0 = min, 100 = max) and the pipeline's current volume level
1618 : // is meaningless and does not contribute.
1619 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1620 5 : if (m_context.audioFadeEnabled && sink &&
1621 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1622 : {
1623 2 : gint fadeVolume{-100};
1624 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1625 2 : if (fadeVolume < 0)
1626 : {
1627 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1628 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1629 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1630 : }
1631 : else
1632 : {
1633 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1634 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1635 : }
1636 : }
1637 : else
1638 : {
1639 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1640 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1641 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1642 : }
1643 :
1644 3 : if (sink)
1645 2 : m_gstWrapper->gstObjectUnref(sink);
1646 :
1647 3 : return true;
1648 : }
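// Worked example of the fade-volume mapping above (values are illustrative): with audio fade enabled and
// the sink reporting "fade-volume" == 50, currentVolume becomes 0.5; with "fade-volume" == -100 the fade
// is inactive and the pipeline's linear stream volume is returned instead. A caller only ever sees the
// normalised value ("player" below is a hypothetical IGstGenericPlayer handle):
//
//     double volume{0.0};
//     if (player->getVolume(volume))    // volume is reported in the range 0.0 .. 1.0
//     {
//         RIALTO_SERVER_LOG_INFO("Current volume: %f", volume);
//     }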
1649 :
1650 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1651 : {
1652 1 : if (m_workerThread)
1653 : {
1654 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1655 : }
1656 : }
1657 :
1658 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1659 : {
1660 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1661 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1662 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1663 : {
1664 2 : if (!m_context.subtitleSink)
1665 : {
1666 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1667 1 : return false;
1668 : }
1669 1 : gboolean muteValue{FALSE};
1670 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1671 1 : mute = muteValue;
1672 : }
1673 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1674 : {
1675 2 : if (!m_context.pipeline)
1676 : {
1677 1 : return false;
1678 : }
1679 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1680 : }
1681 : else
1682 : {
1683 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1684 1 : return false;
1685 : }
1686 :
1687 2 : return true;
1688 : }
1689 :
1690 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1691 : {
1692 1 : if (m_workerThread)
1693 : {
1694 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1695 : }
1696 : }
1697 :
1698 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1699 : {
1700 3 : if (!m_context.subtitleSink)
1701 : {
1702 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1703 1 : return false;
1704 : }
1705 :
1706 2 : gchar *identifier = nullptr;
1707 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1708 :
1709 2 : if (identifier)
1710 : {
1711 1 : textTrackIdentifier = identifier;
1712 1 : m_glibWrapper->gFree(identifier);
1713 1 : return true;
1714 : }
1715 : else
1716 : {
1717 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1718 1 : return false;
1719 : }
1720 : }
1721 :
1722 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1723 : {
1724 1 : if (m_workerThread)
1725 : {
1726 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1727 : }
1728 1 : return true;
1729 : }
1730 :
1731 1 : bool GstGenericPlayer::setSync(bool sync)
1732 : {
1733 1 : if (m_workerThread)
1734 : {
1735 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1736 : }
1737 1 : return true;
1738 : }
1739 :
1740 4 : bool GstGenericPlayer::getSync(bool &sync)
1741 : {
1742 4 : bool returnValue{false};
1743 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1744 4 : if (sink)
1745 : {
1746 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1747 : {
1748 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1749 1 : returnValue = true;
1750 : }
1751 : else
1752 : {
1753 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1754 : }
1755 2 : m_gstWrapper->gstObjectUnref(sink);
1756 : }
1757 2 : else if (m_context.pendingSync.has_value())
1758 : {
1759 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1760 1 : sync = m_context.pendingSync.value();
1761 1 : returnValue = true;
1762 : }
1763 : else
1764 : {
1765 : // We don't know the sink's default value for the sync property, so return failure here
1766 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1767 : }
1768 :
1769 4 : return returnValue;
1770 : }
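// Fallback pattern used by this getter (and by getStreamSyncMode() and getBufferingLimit() below): read
// the property from the live element when one is attached, otherwise fall back to a value assumed to have
// been queued by the corresponding setter, and fail only when neither exists because the element's default
// is unknown. Hypothetical caller sketch:
//
//     bool sync{false};
//     if (!player->getSync(sync))
//     {
//         // no audio sink attached yet and nothing queued by setSync(); the value is unknown
//     }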
1771 :
1772 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1773 : {
1774 1 : if (m_workerThread)
1775 : {
1776 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1777 : }
1778 1 : return true;
1779 : }
1780 :
1781 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1782 : {
1783 1 : if (m_workerThread)
1784 : {
1785 2 : m_workerThread->enqueueTask(
1786 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1787 : }
1788 1 : return true;
1789 : }
1790 :
1791 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1792 : {
1793 5 : bool returnValue{false};
1794 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1795 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1796 : {
1797 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1798 2 : returnValue = true;
1799 : }
1800 : else
1801 : {
1802 3 : std::unique_lock lock{m_context.propertyMutex};
1803 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1804 : {
1805 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1806 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1807 1 : returnValue = true;
1808 : }
1809 : else
1810 : {
1811 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1812 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1813 : }
1814 3 : }
1815 :
1816 5 : if (decoder)
1817 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1818 :
1819 5 : return returnValue;
1820 : }
1821 :
1822 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1823 : {
1824 1 : if (m_workerThread)
1825 : {
1826 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1827 : }
1828 : }
1829 :
1830 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime)
1831 : {
1832 1 : if (m_workerThread)
1833 : {
1834 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
1835 : }
1836 : }
1837 :
1838 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
1839 : double appliedRate, uint64_t stopPosition)
1840 : {
1841 1 : if (m_workerThread)
1842 : {
1843 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
1844 : resetTime, appliedRate, stopPosition));
1845 : }
1846 : }
1847 :
1848 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
1849 : {
1850 1 : if (m_workerThread)
1851 : {
1852 2 : m_workerThread->enqueueTask(
1853 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
1854 : }
1855 1 : }
1856 :
1857 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
1858 : {
1859 1 : if (m_workerThread)
1860 : {
1861 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
1862 : }
1863 : }
1864 :
1865 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
1866 : {
1867 5 : bool returnValue{false};
1868 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1869 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1870 : {
1871 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
1872 2 : returnValue = true;
1873 : }
1874 : else
1875 : {
1876 3 : std::unique_lock lock{m_context.propertyMutex};
1877 3 : if (m_context.pendingBufferingLimit.has_value())
1878 : {
1879 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1880 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
1881 1 : returnValue = true;
1882 : }
1883 : else
1884 : {
1885 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
1886 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1887 : }
1888 3 : }
1889 :
1890 5 : if (decoder)
1891 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1892 :
1893 5 : return returnValue;
1894 : }
1895 :
1896 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
1897 : {
1898 1 : if (m_workerThread)
1899 : {
1900 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
1901 : }
1902 : }
1903 :
1904 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
1905 : {
1906 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1907 : {
1908 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
1909 1 : return true;
1910 : }
1911 : else
1912 : {
1913 2 : std::unique_lock lock{m_context.propertyMutex};
1914 2 : if (m_context.pendingUseBuffering.has_value())
1915 : {
1916 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1917 1 : useBuffering = m_context.pendingUseBuffering.value();
1918 1 : return true;
1919 : }
1920 2 : }
1921 1 : return false;
1922 : }
1923 :
1924 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
1925 : {
1926 1 : if (m_workerThread)
1927 : {
1928 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
1929 : }
1930 : }
1931 :
1932 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
1933 : {
1934 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message));
1935 : }
1936 :
1937 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
1938 : {
1939 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
1940 : }
1941 :
1942 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
1943 : {
1944 : // Only add children that are sinks
1945 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1946 : {
1947 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
1948 :
1949 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
1950 : {
1951 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
1952 : }
1953 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
1954 : }
1955 3 : }
1956 :
1957 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
1958 : {
1959 : // Only add children that are sinks
1960 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1961 : {
1962 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
1963 :
1964 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
1965 : {
1966 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
1967 : }
1968 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
1969 : }
1970 3 : }
1971 :
1972 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
1973 : {
1974 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1975 : {
1976 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
1977 :
1978 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
1979 : {
1980 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
1981 1 : return;
1982 : }
1983 :
1984 2 : m_context.autoVideoChildSink = nullptr;
1985 : }
1986 : }
1987 :
1988 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
1989 : {
1990 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1991 : {
1992 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
1993 :
1994 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
1995 : {
1996 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
1997 1 : return;
1998 : }
1999 :
2000 2 : m_context.autoAudioChildSink = nullptr;
2001 : }
2002 : }
2003 :
2004 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2005 : {
2006 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2007 11 : if (!kTmpName)
2008 0 : return sink;
2009 :
2010 11 : const std::string kElementTypeName{kTmpName};
2011 11 : if (kElementTypeName == "GstAutoVideoSink")
2012 : {
2013 1 : if (!m_context.autoVideoChildSink)
2014 : {
2015 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2016 : }
2017 : else
2018 : {
2019 1 : return m_context.autoVideoChildSink;
2020 : }
2021 : }
2022 10 : return sink;
2023 11 : }
2024 :
2025 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2026 : {
2027 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2028 11 : if (!kTmpName)
2029 0 : return sink;
2030 :
2031 11 : const std::string kElementTypeName{kTmpName};
2032 11 : if (kElementTypeName == "GstAutoAudioSink")
2033 : {
2034 1 : if (!m_context.autoAudioChildSink)
2035 : {
2036 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2037 : }
2038 : else
2039 : {
2040 1 : return m_context.autoAudioChildSink;
2041 : }
2042 : }
2043 10 : return sink;
2044 11 : }
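// Background note (assumed wiring; the signal connections are made elsewhere): autovideosink and
// autoaudiosink are GstChildProxy bins that pick a concrete platform sink at runtime, so the
// add/removeAuto*SinkChild() handlers above are expected to be driven by the bins' "child-added" /
// "child-removed" signals. getSinkChildIfAuto*Sink() then lets property reads and writes target the real
// child sink instead of the wrapper bin, e.g. (hypothetical connection):
//
//     g_signal_connect(autoVideoSink, "child-added", G_CALLBACK(onAutoVideoSinkChildAdded), this);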
2045 :
2046 200 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2047 : {
2048 200 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2049 :
2050 200 : if (enableAudio)
2051 : {
2052 200 : flags |= getGstPlayFlag("audio");
2053 200 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2054 : }
2055 :
2056 200 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2057 : }
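// getGstPlayFlag() is assumed to resolve playbin "flags" values by their GFlags nicknames; a minimal
// sketch of such a helper (not the actual implementation in this file) could look like:
//
//     unsigned getGstPlayFlag(const char *nick)
//     {
//         GFlagsClass *flagsClass =
//             static_cast<GFlagsClass *>(g_type_class_ref(g_type_from_name("GstPlayFlags")));
//         GFlagsValue *flag = g_flags_get_value_by_nick(flagsClass, nick);
//         unsigned value = flag ? flag->value : 0;
//         g_type_class_unref(flagsClass);
//         return value;
//     }
//
// Looking the values up by nickname ("audio", "video", "native-video", "text", "native-audio") keeps the
// bitmask aligned with whatever GstPlayFlags the installed playbin actually registers.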
2058 :
2059 200 : bool GstGenericPlayer::shouldEnableNativeAudio()
2060 : {
2061 200 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2062 200 : if (factory)
2063 : {
2064 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2065 1 : return true;
2066 : }
2067 199 : return false;
2068 : }
2069 :
2070 : }; // namespace firebolt::rialto::server
|