Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "GstDispatcherThread.h"
25 : #include "GstGenericPlayer.h"
26 : #include "GstProtectionMetadata.h"
27 : #include "IGstTextTrackSinkFactory.h"
28 : #include "IMediaPipeline.h"
29 : #include "ITimer.h"
30 : #include "RialtoServerLogging.h"
31 : #include "TypeConverters.h"
32 : #include "Utils.h"
33 : #include "WorkerThread.h"
34 : #include "tasks/generic/GenericPlayerTaskFactory.h"
35 :
36 : namespace
37 : {
38 : /**
 39 : * @brief Position reporting interval in ms.
40 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
41 : * whenever the session moves to another playback state.
42 : */
43 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
44 :
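// Equality for SegmentData: used below in pushAdditionalSegmentIfRequired() to detect whether the
// pending initial segment matches the segment that has already been pushed for a source.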
45 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
46 : {
47 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
48 2 : (lhs.stopPosition == rhs.stopPosition);
49 : }
50 : } // namespace
51 :
52 : namespace firebolt::rialto::server
53 : {
54 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
55 :
56 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
57 : {
58 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
59 :
60 3 : if (!factory)
61 : {
62 : try
63 : {
64 3 : factory = std::make_shared<GstGenericPlayerFactory>();
65 : }
66 0 : catch (const std::exception &e)
67 : {
68 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
69 : }
70 :
71 3 : GstGenericPlayerFactory::m_factory = factory;
72 : }
73 :
74 3 : return factory;
75 : }
76 :
77 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
78 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
79 : const VideoRequirements &videoRequirements,
80 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
81 : {
82 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
83 :
84 : try
85 : {
86 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
87 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
88 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
89 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
91 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
92 : {
93 0 : throw std::runtime_error("Cannot create GstWrapper");
94 : }
95 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
96 : {
97 0 : throw std::runtime_error("Cannot create GlibWrapper");
98 : }
99 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
100 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
101 : {
102 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
103 : }
104 : gstPlayer = std::make_unique<
105 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
106 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), IGstSrcFactory::getFactory(),
107 2 : common::ITimerFactory::getFactory(),
108 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
109 : rdkGstreamerUtilsWrapper,
110 2 : IGstTextTrackSinkFactory::createFactory()),
111 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
112 3 : IGstProtectionMetadataHelperFactory::createFactory());
113 1 : }
114 0 : catch (const std::exception &e)
115 : {
116 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
117 : }
118 :
119 1 : return gstPlayer;
120 : }
121 :
122 205 : GstGenericPlayer::GstGenericPlayer(
123 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
124 : const VideoRequirements &videoRequirements,
125 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
126 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
128 : const IGstInitialiser &gstInitialiser, const std::shared_ptr<IGstSrcFactory> &gstSrcFactory,
129 : std::shared_ptr<common::ITimerFactory> timerFactory, std::unique_ptr<IGenericPlayerTaskFactory> taskFactory,
130 : std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
131 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
132 205 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
133 205 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
134 410 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
135 615 : m_taskFactory{std::move(taskFactory)}
136 : {
137 205 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
138 :
139 205 : gstInitialiser.waitForInitialisation();
140 :
141 205 : m_context.decryptionService = &decryptionService;
142 :
143 205 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
144 : {
145 2 : throw std::runtime_error("Cannot create GstSrc");
146 : }
147 :
148 203 : if (!timerFactory)
149 : {
 150 1 : throw std::runtime_error("TimerFactory is invalid");
151 : }
152 :
153 404 : if ((!gstProtectionMetadataFactory) ||
154 404 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
155 : {
156 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
157 : }
158 :
 159 : // Ensure that rialtosrc has been initialised
160 202 : m_context.gstSrc->initSrc();
161 :
162 : // Start task thread
163 202 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
164 : {
165 0 : throw std::runtime_error("Failed to create the worker thread");
166 : }
167 :
168 : // Initialise pipeline
169 202 : switch (type)
170 : {
171 201 : case MediaType::MSE:
172 : {
173 201 : initMsePipeline();
174 201 : break;
175 : }
176 1 : default:
177 : {
178 1 : resetWorkerThread();
179 1 : throw std::runtime_error("Media type not supported");
180 : }
181 : }
182 :
183 : // Check the video requirements for a limited video.
184 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
185 : // video in a dual video scenario.
186 201 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
187 : {
188 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
189 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
190 8 : bool ermContextResult = setErmContext();
191 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
192 : {
193 1 : resetWorkerThread();
194 1 : termPipeline();
195 1 : throw std::runtime_error("Could not set secondary video");
196 : }
197 7 : }
198 : else
199 : {
200 193 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
201 : }
202 :
203 : m_gstDispatcherThread =
204 200 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
205 270 : }
206 :
207 400 : GstGenericPlayer::~GstGenericPlayer()
208 : {
209 200 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
210 :
211 200 : m_gstDispatcherThread.reset();
212 :
213 200 : resetWorkerThread();
214 :
215 200 : termPipeline();
216 400 : }
217 :
218 201 : void GstGenericPlayer::initMsePipeline()
219 : {
220 : // Make playbin
221 201 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
222 : // Set pipeline flags
223 201 : setPlaybinFlags(true);
224 :
225 : // Set callbacks
226 201 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
227 201 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
228 201 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
229 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
230 :
231 : // Set uri
232 201 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
233 :
234 : // Check playsink
235 201 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
236 201 : if (playsink)
237 : {
238 200 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
239 200 : m_gstWrapper->gstObjectUnref(playsink);
240 : }
241 : else
242 : {
243 1 : GST_WARNING("No playsink ?!?!?");
244 : }
245 201 : }
246 :
247 202 : void GstGenericPlayer::resetWorkerThread()
248 : {
249 : // Shutdown task thread
250 202 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
251 202 : m_workerThread->join();
252 202 : m_workerThread.reset();
253 : }
254 :
255 201 : void GstGenericPlayer::termPipeline()
256 : {
257 201 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
258 : {
259 0 : m_finishSourceSetupTimer->cancel();
260 : }
261 :
262 201 : m_finishSourceSetupTimer.reset();
263 :
264 250 : for (auto &elem : m_context.streamInfo)
265 : {
266 49 : StreamInfo &streamInfo = elem.second;
267 51 : for (auto &buffer : streamInfo.buffers)
268 : {
269 2 : m_gstWrapper->gstBufferUnref(buffer);
270 : }
271 :
272 49 : streamInfo.buffers.clear();
273 : }
274 :
275 201 : m_taskFactory->createStop(m_context, *this)->execute();
276 201 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
277 201 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
278 201 : m_gstWrapper->gstObjectUnref(bus);
279 :
280 201 : if (m_context.source)
281 : {
282 1 : m_gstWrapper->gstObjectUnref(m_context.source);
283 : }
284 201 : if (m_context.subtitleSink)
285 : {
286 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
287 : }
288 :
289 : // Delete the pipeline
290 201 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
291 : }
292 :
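// Looks up a GstPlayFlags value by its nick (e.g. "audio", "video", "text") and returns it,
// or 0 if the flag is not known on this platform.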
293 805 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
294 : {
295 : GFlagsClass *flagsClass =
296 805 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
297 805 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
298 805 : return flag ? flag->value : 0;
299 : }
300 :
301 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
302 : {
303 1 : self->m_gstWrapper->gstObjectRef(source);
304 1 : if (self->m_workerThread)
305 : {
306 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
307 : }
308 : }
309 :
310 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
311 : {
312 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
313 1 : self->m_gstWrapper->gstObjectRef(element);
314 1 : if (self->m_workerThread)
315 : {
316 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
317 : }
318 : }
319 :
320 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
321 : {
322 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
323 1 : if (self->m_workerThread)
324 : {
325 2 : self->m_workerThread->enqueueTask(
326 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
327 : }
328 1 : }
329 :
330 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
331 : {
332 1 : if (m_workerThread)
333 : {
334 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
335 : }
336 : }
337 :
338 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
339 : {
340 1 : if (m_workerThread)
341 : {
342 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
343 : }
344 : }
345 :
346 2 : void GstGenericPlayer::allSourcesAttached()
347 : {
348 2 : if (m_workerThread)
349 : {
350 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
351 : }
352 : }
353 :
354 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
355 : {
356 1 : if (m_workerThread)
357 : {
358 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
359 : }
360 : }
361 :
362 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
363 : {
364 1 : if (m_workerThread)
365 : {
366 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
367 : }
368 : }
369 :
370 1 : void GstGenericPlayer::setPosition(std::int64_t position)
371 : {
372 1 : if (m_workerThread)
373 : {
374 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
375 : }
376 : }
377 :
378 1 : void GstGenericPlayer::setPlaybackRate(double rate)
379 : {
380 1 : if (m_workerThread)
381 : {
382 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
383 : }
384 : }
385 :
386 4 : bool GstGenericPlayer::getPosition(std::int64_t &position)
387 : {
388 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
389 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
390 4 : if (!m_context.pipeline || GST_STATE(m_context.pipeline) < GST_STATE_PAUSED)
391 : {
392 1 : RIALTO_SERVER_LOG_WARN("GetPosition failed. Pipeline is null or state < PAUSED");
393 1 : return false;
394 : }
395 3 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
396 : {
397 1 : return false;
398 : }
399 2 : return true;
400 : }
401 :
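// Returns the audio or video sink of the playbin pipeline with an added reference that the caller
// must drop. For auto sinks, the actual child sink element is returned instead of the wrapper.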
402 34 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
403 : {
404 34 : const char *kSinkName{nullptr};
405 34 : GstElement *sink{nullptr};
406 34 : switch (mediaSourceType)
407 : {
408 18 : case MediaSourceType::AUDIO:
409 18 : kSinkName = "audio-sink";
410 18 : break;
411 14 : case MediaSourceType::VIDEO:
412 14 : kSinkName = "video-sink";
413 14 : break;
414 2 : default:
415 2 : break;
416 : }
417 34 : if (!kSinkName)
418 : {
419 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
420 : }
421 : else
422 : {
423 32 : if (m_context.pipeline == nullptr)
424 : {
425 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
426 : }
427 : else
428 : {
429 32 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
430 : }
431 32 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
432 32 : if (sink)
433 : {
434 22 : GstElement *autoSink{sink};
435 22 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
436 11 : autoSink = getSinkChildIfAutoVideoSink(sink);
437 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
438 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
439 :
440 : // Is this an auto-sink?...
441 22 : if (autoSink != sink)
442 : {
443 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
444 :
445 : // increase the reference count of the auto sink
446 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
447 : }
448 : }
449 : else
450 : {
451 10 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
452 : }
453 : }
454 34 : return sink;
455 : }
456 :
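// Recursively iterates the pipeline bin and returns the first decoder element matching the given
// media type (with an added reference that the caller must drop), or nullptr if none is found.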
457 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
458 : {
459 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
460 19 : GValue item = G_VALUE_INIT;
461 19 : gboolean done = FALSE;
462 :
463 28 : while (!done)
464 : {
465 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
466 : {
467 12 : case GST_ITERATOR_OK:
468 : {
469 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
470 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
471 :
472 12 : if (factory)
473 : {
474 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
475 12 : if (mediaSourceType == MediaSourceType::AUDIO)
476 : {
477 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
478 : }
479 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
480 : {
481 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
482 : }
483 :
484 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
485 : {
486 12 : m_glibWrapper->gValueUnset(&item);
487 12 : m_gstWrapper->gstIteratorFree(it);
488 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
489 : }
490 : }
491 :
492 0 : m_glibWrapper->gValueUnset(&item);
493 0 : break;
494 : }
495 2 : case GST_ITERATOR_RESYNC:
496 2 : m_gstWrapper->gstIteratorResync(it);
497 2 : break;
498 7 : case GST_ITERATOR_ERROR:
499 : case GST_ITERATOR_DONE:
500 7 : done = TRUE;
501 7 : break;
502 : }
503 : }
504 :
505 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
506 :
507 7 : m_glibWrapper->gValueUnset(&item);
508 7 : m_gstWrapper->gstIteratorFree(it);
509 :
510 7 : return nullptr;
511 : }
512 :
513 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
514 : {
515 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
516 3 : GValue item = G_VALUE_INIT;
517 3 : gboolean done = FALSE;
518 :
519 4 : while (!done)
520 : {
521 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
522 : {
523 2 : case GST_ITERATOR_OK:
524 : {
525 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
526 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
527 :
528 2 : if (factory)
529 : {
530 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
531 2 : if (mediaSourceType == MediaSourceType::AUDIO)
532 : {
533 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
534 : }
535 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
536 : {
537 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
538 : }
539 :
540 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
541 : {
542 2 : m_glibWrapper->gValueUnset(&item);
543 2 : m_gstWrapper->gstIteratorFree(it);
544 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
545 : }
546 : }
547 :
548 0 : m_glibWrapper->gValueUnset(&item);
549 0 : break;
550 : }
551 0 : case GST_ITERATOR_RESYNC:
552 0 : m_gstWrapper->gstIteratorResync(it);
553 0 : break;
554 1 : case GST_ITERATOR_ERROR:
555 : case GST_ITERATOR_DONE:
556 1 : done = TRUE;
557 1 : break;
558 : }
559 : }
560 :
561 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
562 :
563 1 : m_glibWrapper->gValueUnset(&item);
564 1 : m_gstWrapper->gstIteratorFree(it);
565 :
566 1 : return nullptr;
567 : }
568 :
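// Builds rdk_gstreamer_utils audio attributes from the source's audio config; the codec parameter
// is derived from the MIME type (mp4a / ec-3 / lpcm).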
569 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
570 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
571 : {
572 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
573 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
574 5 : if (kSource)
575 : {
576 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
577 : audioAttributes =
578 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
579 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
580 : 0, // used only in one of logs in rdk_gstreamer_utils, no
581 : // need to set this param.
582 : 0, // used only in one of logs in rdk_gstreamer_utils, no
583 : // need to set this param.
584 4 : audioConfig.codecSpecificConfig.data(),
585 : static_cast<std::uint32_t>(
586 4 : audioConfig.codecSpecificConfig.size())};
587 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
588 : {
589 2 : audioAttributes->m_codecParam = "mp4a";
590 : }
591 2 : else if (source->getMimeType() == "audio/x-eac3")
592 : {
593 1 : audioAttributes->m_codecParam = "ec-3";
594 : }
595 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
596 : {
597 1 : audioAttributes->m_codecParam = "lpcm";
598 : }
599 4 : }
600 : else
601 : {
602 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
603 : }
604 :
605 5 : return audioAttributes;
606 : }
607 :
608 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
609 : {
610 1 : if (!m_workerThread)
611 0 : return false;
612 :
613 2 : m_workerThread->enqueueTask(
614 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
615 1 : return true;
616 : }
617 :
618 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
619 : {
620 5 : bool returnValue{false};
621 5 : GstElement *sink{getSink(mediaSourceType)};
622 5 : if (sink)
623 : {
624 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
625 : {
626 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
627 2 : returnValue = true;
628 : }
629 : else
630 : {
631 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
632 : }
633 3 : m_gstWrapper->gstObjectUnref(sink);
634 : }
635 : else
636 : {
 637 2 : RIALTO_SERVER_LOG_ERROR("Failed to get immediate-output property, sink is NULL");
638 : }
639 :
640 5 : return returnValue;
641 : }
642 :
643 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
644 : {
645 5 : bool returnValue{false};
646 5 : GstElement *sink{getSink(mediaSourceType)};
647 5 : if (sink)
648 : {
649 3 : GstStructure *stats{nullptr};
650 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
651 3 : if (!stats)
652 : {
653 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
654 : }
655 : else
656 : {
657 : guint64 renderedFramesTmp;
658 : guint64 droppedFramesTmp;
659 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
660 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
661 : {
662 1 : renderedFrames = renderedFramesTmp;
663 1 : droppedFrames = droppedFramesTmp;
664 1 : returnValue = true;
665 : }
666 : else
667 : {
668 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
669 : GST_ELEMENT_NAME(sink));
670 : }
671 2 : m_gstWrapper->gstStructureFree(stats);
672 : }
673 3 : m_gstWrapper->gstObjectUnref(sink);
674 : }
675 :
676 5 : return returnValue;
677 : }
678 :
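// Wraps the media segment data in a GstBuffer and, for encrypted segments, attaches Rialto
// protection metadata (key ID, IV, subsamples, cipher mode and encryption pattern) used for decryption.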
679 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
680 : {
681 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
682 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
683 :
684 4 : if (mediaSegment.isEncrypted())
685 : {
686 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
687 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
688 :
689 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
690 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
691 3 : mediaSegment.getInitVector().size());
692 3 : GstBuffer *subsamples{nullptr};
693 3 : if (!mediaSegment.getSubSamples().empty())
694 : {
695 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
696 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
697 : GstByteWriter writer;
698 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
699 :
700 6 : for (const auto &subSample : mediaSegment.getSubSamples())
701 : {
702 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
703 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
704 : }
705 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
706 : }
707 :
708 3 : uint32_t crypt = 0;
709 3 : uint32_t skip = 0;
710 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
711 :
712 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
713 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
714 3 : mediaSegment.getInitWithLast15(),
715 : keyId,
716 : initVector,
717 : subsamples,
718 6 : mediaSegment.getCipherMode(),
719 : crypt,
720 : skip,
721 : encryptionPatternSet,
722 6 : m_context.decryptionService};
723 :
724 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
725 : {
726 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
727 1 : if (keyId)
728 : {
729 1 : m_gstWrapper->gstBufferUnref(keyId);
730 : }
731 1 : if (initVector)
732 : {
733 1 : m_gstWrapper->gstBufferUnref(initVector);
734 : }
735 1 : if (subsamples)
736 : {
737 1 : m_gstWrapper->gstBufferUnref(subsamples);
738 : }
739 : }
740 : }
741 :
742 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
743 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
744 4 : return gstBuffer;
745 : }
746 :
747 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
748 : {
749 4 : auto elem = m_context.streamInfo.find(mediaSource);
750 4 : if (elem != m_context.streamInfo.end())
751 : {
752 2 : StreamInfo &streamInfo = elem->second;
753 2 : streamInfo.isNeedDataPending = false;
754 :
755 : // Send new NeedMediaData if we still need it
756 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
757 : {
758 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
759 : }
760 : }
761 : else
762 : {
763 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
764 : }
765 4 : }
766 :
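// Pushes any buffered data for the given source type to its app source. Pending initial segments
// are sent first where required, and a BUFFERED network-state notification is raised once every
// attached stream has pushed data.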
767 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
768 : {
769 19 : auto elem = m_context.streamInfo.find(mediaType);
770 19 : if (elem != m_context.streamInfo.end())
771 : {
772 16 : StreamInfo &streamInfo = elem->second;
773 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
774 : {
775 2 : return;
776 : }
777 :
778 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
779 : {
780 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
781 : }
782 : else
783 : {
784 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
785 : }
786 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
787 : {
 788 : // This needs to be done before gstAppSrcPushBuffer() is
 789 : // called, because pushing the buffer can free its memory
790 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
791 : }
792 :
793 28 : for (GstBuffer *buffer : streamInfo.buffers)
794 : {
795 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
796 : }
797 14 : streamInfo.buffers.clear();
798 14 : streamInfo.isDataPushed = true;
799 :
800 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
801 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
802 15 : [](const auto &entry) { return entry.second.isDataPushed; });
803 :
804 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
805 : {
806 1 : m_context.bufferedNotificationSent = true;
807 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
808 : }
809 14 : cancelUnderflow(mediaType);
810 :
811 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
812 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
813 : {
814 0 : setEos(mediaType);
815 : }
816 : }
817 : }
818 :
819 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
820 : {
821 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
822 7 : if (elem != m_context.streamInfo.end())
823 : {
824 6 : StreamInfo &streamInfo = elem->second;
825 :
826 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
827 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
828 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
829 :
830 6 : if (rate != kInvalidRate)
831 : {
832 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
833 : }
834 :
835 6 : if (channels != kInvalidChannels)
836 : {
837 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
838 : }
839 :
840 6 : setCodecData(newCaps, codecData);
841 :
842 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
843 : {
844 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
845 : }
846 :
847 6 : m_gstWrapper->gstCapsUnref(newCaps);
848 6 : m_gstWrapper->gstCapsUnref(currentCaps);
849 : }
850 7 : }
851 :
852 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
853 : const std::shared_ptr<CodecData> &codecData)
854 : {
855 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
856 8 : if (elem != m_context.streamInfo.end())
857 : {
858 7 : StreamInfo &streamInfo = elem->second;
859 :
860 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
861 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
862 :
863 7 : if (width > 0)
864 : {
865 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
866 : }
867 :
868 7 : if (height > 0)
869 : {
870 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
871 : }
872 :
873 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
874 : {
875 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
876 : frameRate.denominator, NULL);
877 : }
878 :
879 7 : setCodecData(newCaps, codecData);
880 :
881 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
882 : {
883 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
884 : }
885 :
886 7 : m_gstWrapper->gstCapsUnref(currentCaps);
887 7 : m_gstWrapper->gstCapsUnref(newCaps);
888 : }
889 8 : }
890 :
891 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
892 : {
893 5 : if (clippingStart || clippingEnd)
894 : {
895 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
896 : {
897 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
898 : clippingStart, clippingEnd);
899 : }
900 : else
901 : {
902 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
903 : buffer, clippingStart, clippingEnd);
904 : }
905 : }
906 5 : }
907 :
908 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
909 : {
910 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
911 : {
912 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
913 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
914 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
915 7 : m_gstWrapper->gstBufferUnref(buf);
916 7 : return true;
917 : }
918 6 : if (codecData && CodecDataType::STRING == codecData->type)
919 : {
920 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
921 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
922 2 : return true;
923 : }
924 4 : return false;
925 : }
926 :
927 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
928 : {
929 12 : auto initialPosition = m_context.initialPositions.find(source);
930 12 : if (m_context.initialPositions.end() == initialPosition)
931 : {
932 : // Sending initial sample not needed
933 7 : return;
934 : }
 935 : // GstAppSrc does not replace the segment if it's the same as the previous one.
 936 : // That causes problems with position reporting on Amlogic devices, so we need to push
 937 : // two segments with different reset time values.
938 5 : pushAdditionalSegmentIfRequired(source);
939 :
940 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
941 : {
942 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
943 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
944 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
945 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
946 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
947 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
948 : {
949 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
950 1 : m_gstWrapper->gstSegmentFree(segment);
951 1 : m_context.initialPositions.erase(initialPosition);
952 1 : return;
953 : }
954 5 : segment->applied_rate = appliedRate;
955 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
956 : "], rate: %f, appliedRate %f, reset_time: %d\n",
957 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
958 : segment->rate, segment->applied_rate, resetTime);
959 :
960 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
 961 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
 962 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (and that causes problems with EME
 963 : // for the first frame).
964 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
965 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
966 5 : m_gstWrapper->gstSampleUnref(sample);
967 5 : m_gstWrapper->gstCapsUnref(currentCaps);
968 :
969 5 : m_gstWrapper->gstSegmentFree(segment);
970 : }
971 4 : m_context.currentPosition[source] = initialPosition->second.back();
972 4 : m_context.initialPositions.erase(initialPosition);
973 4 : return;
974 : }
975 :
976 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
977 : {
978 5 : auto currentPosition = m_context.currentPosition.find(source);
979 5 : if (m_context.currentPosition.end() == currentPosition)
980 : {
981 4 : return;
982 : }
983 1 : auto initialPosition = m_context.initialPositions.find(source);
984 1 : if (m_context.initialPositions.end() == initialPosition)
985 : {
986 0 : return;
987 : }
988 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
989 1 : currentPosition->second == initialPosition->second.back())
990 : {
991 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
992 1 : SegmentData additionalSegment = initialPosition->second.back();
993 1 : additionalSegment.resetTime = false;
994 1 : initialPosition->second.push_back(additionalSegment);
995 : }
996 : }
997 :
998 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
999 : {
1000 2 : auto initialPosition = m_context.initialPositions.find(source);
1001 2 : if (m_context.initialPositions.end() == initialPosition)
1002 : {
1003 : // Sending initial sample not needed
1004 1 : return;
1005 : }
1006 :
1007 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1008 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1009 :
1010 1 : m_context.initialPositions.erase(initialPosition);
1011 : }
1012 :
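// Re-attaches a source of an already existing type. If the new caps differ from the ones currently
// set on the app source, an audio track codec/channel switch is performed via rdk_gstreamer_utils.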
1013 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1014 : {
1015 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1016 : {
1017 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1018 1 : return false;
1019 : }
1020 6 : if (source->getMimeType().empty())
1021 : {
1022 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1023 1 : return false;
1024 : }
1025 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1026 5 : if (!audioAttributes)
1027 : {
1028 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1029 1 : return false;
1030 : }
1031 : std::int64_t currentDispPts64b; // In netflix code it's currentDisplayPosition + offset
1032 4 : m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, ¤tDispPts64b);
1033 4 : long long currentDispPts = currentDispPts64b; // NOLINT(runtime/int)
1034 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1035 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1036 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1037 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1038 : {
1039 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1040 3 : int sampleAttributes{
1041 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
1042 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1043 3 : unsigned int ui32Delay{0}; // output param
1044 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1045 : // currentDispPts in rdk_gstreamer_utils function stub
1046 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1047 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1048 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1049 3 : m_glibWrapper->gFree(oldCapsCStr);
1050 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1051 3 : bool svpEnabled{true}; // assume always true
1052 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1053 : bool result =
1054 3 : m_rdkGstreamerUtilsWrapper
1055 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1056 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1057 : &audioChangeStage,
1058 : &caps, // may fail for amlogic - that implementation changes
1059 : // this parameter, it's probably used by Netflix later
1060 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1061 :
1062 3 : if (!result || !retVal)
1063 : {
1064 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1065 : }
1066 : }
1067 : else
1068 : {
1069 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1070 : }
1071 :
1072 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1073 4 : if (caps)
1074 4 : m_gstWrapper->gstCapsUnref(caps);
1075 4 : if (oldCaps)
1076 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1077 :
1078 4 : return true;
1079 5 : }
1080 :
1081 85 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1082 : {
1083 85 : if (m_workerThread)
1084 : {
1085 85 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1086 : }
1087 : }
1088 :
1089 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1090 : {
1091 1 : if (m_workerThread)
1092 : {
1093 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1094 : }
1095 : }
1096 :
1097 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1098 : {
1099 3 : if (m_workerThread)
1100 : {
1101 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1102 6 : m_workerThread->enqueueTask(
1103 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1104 : }
1105 3 : }
1106 :
1107 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1108 : {
1109 2 : if (m_workerThread)
1110 : {
1111 2 : bool underflowEnabled = m_context.isPlaying;
1112 4 : m_workerThread->enqueueTask(
1113 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1114 : }
1115 2 : }
1116 :
1117 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1118 : {
1119 1 : allSourcesAttached();
1120 : }
1121 :
1122 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1123 : {
1124 14 : auto elem = m_context.streamInfo.find(mediaSource);
1125 14 : if (elem != m_context.streamInfo.end())
1126 : {
1127 14 : StreamInfo &streamInfo = elem->second;
1128 14 : if (!streamInfo.underflowOccured)
1129 : {
1130 11 : return;
1131 : }
1132 :
1133 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1134 3 : streamInfo.underflowOccured = false;
1135 : }
1136 : }
1137 :
1138 1 : void GstGenericPlayer::play()
1139 : {
1140 1 : if (m_workerThread)
1141 : {
1142 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1143 : }
1144 : }
1145 :
1146 1 : void GstGenericPlayer::pause()
1147 : {
1148 1 : if (m_workerThread)
1149 : {
1150 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1151 : }
1152 : }
1153 :
1154 1 : void GstGenericPlayer::stop()
1155 : {
1156 1 : if (m_workerThread)
1157 : {
1158 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1159 : }
1160 : }
1161 :
1162 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1163 : {
1164 4 : if (!m_context.pipeline)
1165 : {
1166 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1167 1 : if (m_gstPlayerClient)
1168 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1169 1 : return false;
1170 : }
1171 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1172 : {
1173 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1174 1 : if (m_gstPlayerClient)
1175 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1176 1 : return false;
1177 : }
1178 2 : return true;
1179 : }
1180 :
1181 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1182 : {
1183 1 : if (m_workerThread)
1184 : {
1185 2 : m_workerThread->enqueueTask(
1186 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1187 : }
1188 1 : }
1189 :
1190 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1191 : {
1192 1 : if (m_workerThread)
1193 : {
1194 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1195 : }
1196 : }
1197 :
1198 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1199 : {
1200 4 : bool result = false;
1201 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1202 4 : if (videoSink)
1203 : {
1204 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1205 : {
1206 : std::string rect =
1207 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1208 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1209 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1210 2 : m_context.pendingGeometry.clear();
1211 2 : result = true;
1212 : }
1213 : else
1214 : {
1215 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1216 : }
1217 3 : m_gstWrapper->gstObjectUnref(videoSink);
1218 : }
1219 :
1220 4 : return result;
1221 : }
1222 :
1223 3 : bool GstGenericPlayer::setImmediateOutput()
1224 : {
1225 3 : bool result{false};
1226 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1227 : {
1228 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1229 3 : if (sink)
1230 : {
1231 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1232 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1233 :
1234 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1235 : {
1236 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1237 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1238 1 : result = true;
1239 : }
1240 : else
1241 : {
1242 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1243 : }
1244 2 : m_context.pendingImmediateOutputForVideo.reset();
1245 2 : m_gstWrapper->gstObjectUnref(sink);
1246 : }
1247 : else
1248 : {
1249 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1250 : }
1251 : }
1252 3 : return result;
1253 : }
1254 :
1255 4 : bool GstGenericPlayer::setLowLatency()
1256 : {
1257 4 : bool result{false};
1258 4 : if (m_context.pendingLowLatency.has_value())
1259 : {
1260 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1261 4 : if (sink)
1262 : {
1263 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1264 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1265 :
1266 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1267 : {
1268 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1269 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1270 2 : result = true;
1271 : }
1272 : else
1273 : {
1274 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1275 : }
1276 3 : m_context.pendingLowLatency.reset();
1277 3 : m_gstWrapper->gstObjectUnref(sink);
1278 : }
1279 : else
1280 : {
1281 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1282 : }
1283 : }
1284 4 : return result;
1285 : }
1286 :
1287 3 : bool GstGenericPlayer::setSync()
1288 : {
1289 3 : bool result{false};
1290 3 : if (m_context.pendingSync.has_value())
1291 : {
1292 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1293 3 : if (sink)
1294 : {
1295 2 : bool sync{m_context.pendingSync.value()};
1296 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1297 :
1298 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1299 : {
1300 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1301 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1302 1 : result = true;
1303 : }
1304 : else
1305 : {
1306 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1307 : }
1308 2 : m_context.pendingSync.reset();
1309 2 : m_gstWrapper->gstObjectUnref(sink);
1310 : }
1311 : else
1312 : {
1313 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1314 : }
1315 : }
1316 3 : return result;
1317 : }
1318 :
1319 3 : bool GstGenericPlayer::setSyncOff()
1320 : {
1321 3 : bool result{false};
1322 3 : if (m_context.pendingSyncOff.has_value())
1323 : {
1324 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1325 3 : if (decoder)
1326 : {
1327 2 : bool syncOff{m_context.pendingSyncOff.value()};
1328 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1329 :
1330 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1331 : {
 1332 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1333 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1334 1 : result = true;
1335 : }
1336 : else
1337 : {
1338 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1339 : }
1340 2 : m_context.pendingSyncOff.reset();
1341 2 : m_gstWrapper->gstObjectUnref(decoder);
1342 : }
1343 : else
1344 : {
1345 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1346 : }
1347 : }
1348 3 : return result;
1349 : }
1350 :
1351 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1352 : {
1353 6 : bool result{false};
1354 6 : int32_t streamSyncMode{0};
1355 : {
1356 6 : std::unique_lock lock{m_context.propertyMutex};
1357 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1358 : {
1359 0 : return false;
1360 : }
1361 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1362 : }
1363 6 : if (MediaSourceType::AUDIO == type)
1364 : {
1365 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1366 3 : if (!decoder)
1367 : {
1368 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1369 1 : return false;
1370 : }
1371 :
1372 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1373 :
1374 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1375 : {
1376 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1377 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1378 1 : result = true;
1379 : }
1380 : else
1381 : {
1382 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1383 : }
1384 2 : m_gstWrapper->gstObjectUnref(decoder);
1385 2 : std::unique_lock lock{m_context.propertyMutex};
1386 2 : m_context.pendingStreamSyncMode.erase(type);
1387 : }
1388 3 : else if (MediaSourceType::VIDEO == type)
1389 : {
1390 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1391 3 : if (!parser)
1392 : {
1393 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1394 1 : return false;
1395 : }
1396 :
1397 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1398 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1399 :
1400 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1401 : {
1402 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1403 1 : result = true;
1404 : }
1405 : else
1406 : {
1407 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1408 : }
1409 2 : m_gstWrapper->gstObjectUnref(parser);
1410 2 : std::unique_lock lock{m_context.propertyMutex};
1411 2 : m_context.pendingStreamSyncMode.erase(type);
1412 : }
1413 4 : return result;
1414 : }
1415 :
1416 3 : bool GstGenericPlayer::setRenderFrame()
1417 : {
1418 3 : bool result{false};
1419 3 : if (m_context.pendingRenderFrame)
1420 : {
1421 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1422 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1423 3 : if (sink)
1424 : {
1425 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1426 : {
1427 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1428 :
1429 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1430 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1431 : false));
1432 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1433 1 : result = true;
1434 : }
1435 : else
1436 : {
1437 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1438 : }
1439 2 : m_gstWrapper->gstObjectUnref(sink);
1440 2 : m_context.pendingRenderFrame = false;
1441 : }
1442 : else
1443 : {
1444 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1445 : }
1446 : }
1447 3 : return result;
1448 : }
1449 :
1450 3 : bool GstGenericPlayer::setBufferingLimit()
1451 : {
1452 3 : bool result{false};
1453 3 : guint bufferingLimit{0};
1454 : {
1455 3 : std::unique_lock lock{m_context.propertyMutex};
1456 3 : if (!m_context.pendingBufferingLimit.has_value())
1457 : {
1458 0 : return false;
1459 : }
1460 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1461 : }
1462 :
1463 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1464 3 : if (decoder)
1465 : {
1466 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1467 :
1468 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1469 : {
1470 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1471 1 : result = true;
1472 : }
1473 : else
1474 : {
1475 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1476 : GST_ELEMENT_NAME(decoder));
1477 : }
1478 2 : m_gstWrapper->gstObjectUnref(decoder);
1479 2 : std::unique_lock lock{m_context.propertyMutex};
1480 2 : m_context.pendingBufferingLimit.reset();
1481 : }
1482 : else
1483 : {
1484 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1485 : }
1486 3 : return result;
1487 : }
1488 :
1489 2 : bool GstGenericPlayer::setUseBuffering()
1490 : {
1491 2 : std::unique_lock lock{m_context.propertyMutex};
1492 2 : if (m_context.pendingUseBuffering.has_value())
1493 : {
1494 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1495 : {
1496 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1497 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1498 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1499 : useBufferingGboolean, nullptr);
1500 1 : m_context.pendingUseBuffering.reset();
1501 1 : return true;
1502 : }
1503 : else
1504 : {
1505 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1506 : }
1507 : }
1508 1 : return false;
1509 2 : }
1510 :
1511 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1512 : {
1513 8 : bool result = false;
1514 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1515 8 : if (factory)
1516 : {
1517 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1518 7 : if (videoSink)
1519 : {
1520 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1521 : {
1522 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1523 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1524 4 : result = true;
1525 : }
1526 : else
1527 : {
1528 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1529 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1530 : }
1531 : }
1532 : else
1533 : {
1534 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1535 : }
1536 :
1537 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1538 : }
1539 : else
1540 : {
1541 : // No westeros sink
1542 1 : result = true;
1543 : }
1544 :
1545 8 : return result;
1546 : }
1547 :
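     : // Attaches a writable "erm" context with res-usage set to 0x0 to the pipeline,
     : // mirroring the secondary-video resource configuration above.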
1548 8 : bool GstGenericPlayer::setErmContext()
1549 : {
1550 8 : bool result = false;
1551 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1552 8 : if (context)
1553 : {
1554 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1555 6 : if (contextStructure)
1556 : {
1557 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1558 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1559 5 : result = true;
1560 : }
1561 : else
1562 : {
1563 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1564 : }
1565 6 : m_gstWrapper->gstContextUnref(context);
1566 : }
1567 : else
1568 : {
1569 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1570 : }
1571 :
1572 8 : return result;
1573 : }
1574 :
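     : // Starts a periodic timer (kPositionReportTimerMs) that enqueues ReportPosition and
     : // CheckAudioUnderflow tasks on the worker thread; a new timer is created only if one
     : // is not already active.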
1575 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1576 : {
1577 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1578 : {
1579 1 : return;
1580 : }
1581 :
1582 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1583 : kPositionReportTimerMs,
1584 10 : [this]()
1585 : {
1586 1 : if (m_workerThread)
1587 : {
1588 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context));
1589 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1590 : }
1591 1 : },
1592 5 : firebolt::rialto::common::TimerType::PERIODIC);
1593 : }
1594 :
1595 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1596 : {
1597 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1598 : {
1599 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1600 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1601 : }
1602 4 : }
1603 :
1604 2 : void GstGenericPlayer::stopWorkerThread()
1605 : {
1606 2 : if (m_workerThread)
1607 : {
1608 2 : m_workerThread->stop();
1609 : }
1610 : }
1611 :
1612 0 : void GstGenericPlayer::setPendingPlaybackRate()
1613 : {
1614 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1615 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1616 : }
1617 :
1618 1 : void GstGenericPlayer::renderFrame()
1619 : {
1620 1 : if (m_workerThread)
1621 : {
1622 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1623 : }
1624 : }
1625 :
1626 16 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1627 : {
1628 16 : if (m_workerThread)
1629 : {
1630 32 : m_workerThread->enqueueTask(
1631 32 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1632 : }
1633 16 : }
1634 :
1635 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1636 : {
1637 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1638 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1639 3 : if (!m_context.pipeline)
1640 : {
1641 0 : return false;
1642 : }
1643 :
1644 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1645 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1646 : // The code has been written to remain backward compatible on platforms that don't have this property.
1647 : // The observed behaviour was:
1648 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1649 : // to find volume in the pipeline works and is used.
1650 : // - if the returned fade volume is zero or positive then audio-fade is active. In this case the returned fade volume
1651 : // directly returns the current volume level 0=min to 100=max (and the pipeline's current volume level is
1652 : // meaningless and doesn't contribute in this case).
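     :     // Worked example of the mapping described above (based on the observed behaviour
     :     // only, not on any gstreamer documentation):
     :     //   fade-volume == -100 -> fade inactive, fall back to the pipeline stream volume
     :     //   fade-volume ==   75 -> fade active, currentVolume = 75 / 100.0 = 0.75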
1653 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1654 5 : if (m_context.audioFadeEnabled && sink &&
1655 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1656 : {
1657 2 : gint fadeVolume{-100};
1658 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1659 2 : if (fadeVolume < 0)
1660 : {
1661 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1662 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1663 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1664 : }
1665 : else
1666 : {
1667 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1668 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1669 : }
1670 : }
1671 : else
1672 : {
1673 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1674 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1675 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1676 : }
1677 :
1678 3 : if (sink)
1679 2 : m_gstWrapper->gstObjectUnref(sink);
1680 :
1681 3 : return true;
1682 : }
1683 :
1684 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1685 : {
1686 1 : if (m_workerThread)
1687 : {
1688 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1689 : }
1690 : }
1691 :
1692 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1693 : {
1694 : // We are on main thread here, but m_context.pipeline can be used, because it's modified only in GstGenericPlayer
1695 : // constructor and destructor. GstGenericPlayer is created/destructed on main thread, so we won't have a crash here.
1696 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1697 : {
1698 2 : if (!m_context.subtitleSink)
1699 : {
1700 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1701 1 : return false;
1702 : }
1703 1 : gboolean muteValue{FALSE};
1704 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1705 1 : mute = muteValue;
1706 : }
1707 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1708 : {
1709 2 : if (!m_context.pipeline)
1710 : {
1711 1 : return false;
1712 : }
1713 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1714 : }
1715 : else
1716 : {
1717 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1718 1 : return false;
1719 : }
1720 :
1721 2 : return true;
1722 : }
1723 :
1724 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1725 : {
1726 1 : if (m_workerThread)
1727 : {
1728 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1729 : }
1730 : }
1731 :
1732 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1733 : {
1734 3 : if (!m_context.subtitleSink)
1735 : {
1736 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1737 1 : return false;
1738 : }
1739 :
1740 2 : gchar *identifier = nullptr;
1741 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1742 :
1743 2 : if (identifier)
1744 : {
1745 1 : textTrackIdentifier = identifier;
1746 1 : m_glibWrapper->gFree(identifier);
1747 1 : return true;
1748 : }
1749 : else
1750 : {
1751 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1752 1 : return false;
1753 : }
1754 : }
1755 :
1756 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1757 : {
1758 1 : if (m_workerThread)
1759 : {
1760 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1761 : }
1762 1 : return true;
1763 : }
1764 :
1765 1 : bool GstGenericPlayer::setSync(bool sync)
1766 : {
1767 1 : if (m_workerThread)
1768 : {
1769 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1770 : }
1771 1 : return true;
1772 : }
1773 :
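     : // Reads the "sync" property from the audio sink; if no sink is attached yet the
     : // queued pending value is returned instead, and failure is reported when neither
     : // is available.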
1774 4 : bool GstGenericPlayer::getSync(bool &sync)
1775 : {
1776 4 : bool returnValue{false};
1777 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1778 4 : if (sink)
1779 : {
1780 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1781 : {
1782 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1783 1 : returnValue = true;
1784 : }
1785 : else
1786 : {
1787 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1788 : }
1789 2 : m_gstWrapper->gstObjectUnref(sink);
1790 : }
1791 2 : else if (m_context.pendingSync.has_value())
1792 : {
1793 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1794 1 : sync = m_context.pendingSync.value();
1795 1 : returnValue = true;
1796 : }
1797 : else
1798 : {
1799 : // We don't know the sink's default sync setting, so return failure here
1800 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1801 : }
1802 :
1803 4 : return returnValue;
1804 : }
1805 :
1806 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1807 : {
1808 1 : if (m_workerThread)
1809 : {
1810 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1811 : }
1812 1 : return true;
1813 : }
1814 :
1815 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1816 : {
1817 1 : if (m_workerThread)
1818 : {
1819 2 : m_workerThread->enqueueTask(
1820 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1821 : }
1822 1 : return true;
1823 : }
1824 :
1825 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1826 : {
1827 5 : bool returnValue{false};
1828 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1829 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1830 : {
1831 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1832 2 : returnValue = true;
1833 : }
1834 : else
1835 : {
1836 3 : std::unique_lock lock{m_context.propertyMutex};
1837 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1838 : {
1839 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1840 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1841 1 : returnValue = true;
1842 : }
1843 : else
1844 : {
1845 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1846 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1847 : }
1848 3 : }
1849 :
1850 5 : if (decoder)
1851 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1852 :
1853 5 : return returnValue;
1854 : }
1855 :
1856 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1857 : {
1858 1 : if (m_workerThread)
1859 : {
1860 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1861 : }
1862 : }
1863 :
1864 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime)
1865 : {
1866 1 : if (m_workerThread)
1867 : {
1868 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
1869 : }
1870 : }
1871 :
1872 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
1873 : double appliedRate, uint64_t stopPosition)
1874 : {
1875 1 : if (m_workerThread)
1876 : {
1877 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
1878 : resetTime, appliedRate, stopPosition));
1879 : }
1880 : }
1881 :
1882 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
1883 : {
1884 1 : if (m_workerThread)
1885 : {
1886 2 : m_workerThread->enqueueTask(
1887 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
1888 : }
1889 1 : }
1890 :
1891 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
1892 : {
1893 1 : if (m_workerThread)
1894 : {
1895 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
1896 : }
1897 : }
1898 :
1899 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
1900 : {
1901 5 : bool returnValue{false};
1902 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1903 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1904 : {
1905 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
1906 2 : returnValue = true;
1907 : }
1908 : else
1909 : {
1910 3 : std::unique_lock lock{m_context.propertyMutex};
1911 3 : if (m_context.pendingBufferingLimit.has_value())
1912 : {
1913 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1914 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
1915 1 : returnValue = true;
1916 : }
1917 : else
1918 : {
1919 2 : RIALTO_SERVER_LOG_ERROR("Buffering limit not supported in decoder '%s'",
1920 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1921 : }
1922 3 : }
1923 :
1924 5 : if (decoder)
1925 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1926 :
1927 5 : return returnValue;
1928 : }
1929 :
1930 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
1931 : {
1932 1 : if (m_workerThread)
1933 : {
1934 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
1935 : }
1936 : }
1937 :
1938 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
1939 : {
1940 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1941 : {
1942 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
1943 1 : return true;
1944 : }
1945 : else
1946 : {
1947 2 : std::unique_lock lock{m_context.propertyMutex};
1948 2 : if (m_context.pendingUseBuffering.has_value())
1949 : {
1950 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1951 1 : useBuffering = m_context.pendingUseBuffering.value();
1952 1 : return true;
1953 : }
1954 2 : }
1955 1 : return false;
1956 : }
1957 :
1958 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
1959 : {
1960 1 : if (m_workerThread)
1961 : {
1962 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
1963 : }
1964 : }
1965 :
1966 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
1967 : {
1968 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message));
1969 : }
1970 :
1971 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
1972 : {
1973 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
1974 : }
1975 :
1976 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
1977 : {
1978 : // Only add children that are sinks
1979 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1980 : {
1981 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
1982 :
1983 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
1984 : {
1985 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
1986 : }
1987 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
1988 : }
1989 3 : }
1990 :
1991 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
1992 : {
1993 : // Only add children that are sinks
1994 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
1995 : {
1996 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
1997 :
1998 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
1999 : {
2000 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2001 : }
2002 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2003 : }
2004 3 : }
2005 :
2006 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2007 : {
2008 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2009 : {
2010 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2011 :
2012 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2013 : {
2014 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2015 1 : return;
2016 : }
2017 :
2018 2 : m_context.autoVideoChildSink = nullptr;
2019 : }
2020 : }
2021 :
2022 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2023 : {
2024 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2025 : {
2026 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2027 :
2028 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2029 : {
2030 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2031 1 : return;
2032 : }
2033 :
2034 2 : m_context.autoAudioChildSink = nullptr;
2035 : }
2036 : }
2037 :
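     : // autovideosink/autoaudiosink wrap the platform sink they create at runtime; the
     : // helpers below return that stored child element (where the real properties live)
     : // when one is available, and the supplied sink otherwise.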
2038 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2039 : {
2040 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2041 11 : if (!kTmpName)
2042 0 : return sink;
2043 :
2044 22 : const std::string kElementTypeName{kTmpName};
2045 11 : if (kElementTypeName == "GstAutoVideoSink")
2046 : {
2047 1 : if (!m_context.autoVideoChildSink)
2048 : {
2049 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2050 : }
2051 : else
2052 : {
2053 1 : return m_context.autoVideoChildSink;
2054 : }
2055 : }
2056 10 : return sink;
2057 11 : }
2058 :
2059 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2060 : {
2061 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2062 11 : if (!kTmpName)
2063 0 : return sink;
2064 :
2065 22 : const std::string kElementTypeName{kTmpName};
2066 11 : if (kElementTypeName == "GstAutoAudioSink")
2067 : {
2068 1 : if (!m_context.autoAudioChildSink)
2069 : {
2070 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2071 : }
2072 : else
2073 : {
2074 1 : return m_context.autoAudioChildSink;
2075 : }
2076 : }
2077 10 : return sink;
2078 11 : }
2079 :
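     : // Sets the playbin "flags" property: video, native-video and text are always enabled;
     : // audio (and native-audio, on platforms that provide brcmaudiosink) is added when
     : // enableAudio is true.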
2080 201 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2081 : {
2082 201 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2083 :
2084 201 : if (enableAudio)
2085 : {
2086 201 : flags |= getGstPlayFlag("audio");
2087 201 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2088 : }
2089 :
2090 201 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2091 : }
2092 :
2093 201 : bool GstGenericPlayer::shouldEnableNativeAudio()
2094 : {
2095 201 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2096 201 : if (factory)
2097 : {
2098 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2099 1 : return true;
2100 : }
2101 200 : return false;
2102 : }
2103 :
2104 : }; // namespace firebolt::rialto::server