Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Position reporting interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
46 :
47 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
48 : {
49 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
50 2 : (lhs.stopPosition == rhs.stopPosition);
51 : }
52 : } // namespace
53 :
54 : namespace firebolt::rialto::server
55 : {
56 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
57 :
58 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
59 : {
60 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
61 :
62 3 : if (!factory)
63 : {
64 : try
65 : {
66 3 : factory = std::make_shared<GstGenericPlayerFactory>();
67 : }
68 0 : catch (const std::exception &e)
69 : {
70 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
71 : }
72 :
73 3 : GstGenericPlayerFactory::m_factory = factory;
74 : }
75 :
76 3 : return factory;
77 : }
78 :
79 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
80 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
81 : const VideoRequirements &videoRequirements,
82 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
83 : {
84 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
85 :
86 : try
87 : {
88 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
89 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
92 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
93 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
94 : {
95 0 : throw std::runtime_error("Cannot create GstWrapper");
96 : }
97 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
98 : {
99 0 : throw std::runtime_error("Cannot create GlibWrapper");
100 : }
101 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
102 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
103 : {
104 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
105 : }
106 : gstPlayer = std::make_unique<
107 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
108 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
109 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
110 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
111 : rdkGstreamerUtilsWrapper,
112 2 : IGstTextTrackSinkFactory::createFactory()),
113 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
114 3 : IGstProtectionMetadataHelperFactory::createFactory());
115 1 : }
116 0 : catch (const std::exception &e)
117 : {
118 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
119 : }
120 :
121 1 : return gstPlayer;
122 : }
123 :
124 211 : GstGenericPlayer::GstGenericPlayer(
125 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
126 : const VideoRequirements &videoRequirements,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
129 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
130 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
131 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
132 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
133 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
134 211 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
135 211 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
136 422 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
137 633 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
138 : {
139 211 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
140 :
141 211 : gstInitialiser.waitForInitialisation();
142 :
143 211 : m_context.decryptionService = &decryptionService;
144 :
145 211 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
146 : {
147 2 : throw std::runtime_error("Cannot create GstSrc");
148 : }
149 :
150 209 : if (!timerFactory)
151 : {
152 1 : throw std::runtime_error("TimeFactory is invalid");
153 : }
154 :
155 416 : if ((!gstProtectionMetadataFactory) ||
156 416 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
157 : {
158 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
159 : }
160 :
161 : // Ensure that rialtosrc has been initialised
162 208 : m_context.gstSrc->initSrc();
163 :
164 : // Start task thread
165 208 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
166 : {
167 0 : throw std::runtime_error("Failed to create the worker thread");
168 : }
169 :
170 : // Initialise pipeline
171 208 : switch (type)
172 : {
173 207 : case MediaType::MSE:
174 : {
175 207 : initMsePipeline();
176 207 : break;
177 : }
178 1 : default:
179 : {
180 1 : resetWorkerThread();
181 1 : throw std::runtime_error("Media type not supported");
182 : }
183 : }
184 :
185 : // Check the video requirements for a limited video.
186 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
187 : // video in a dual video scenario.
188 207 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
189 : {
190 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
191 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
192 8 : bool ermContextResult = setErmContext();
193 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
194 : {
195 1 : resetWorkerThread();
196 1 : termPipeline();
197 1 : throw std::runtime_error("Could not set secondary video");
198 : }
199 7 : }
200 : else
201 : {
202 199 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
203 : }
204 :
205 : m_gstDispatcherThread =
206 206 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
207 286 : }
208 :
209 412 : GstGenericPlayer::~GstGenericPlayer()
210 : {
211 206 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
212 206 : m_gstDispatcherThread.reset();
213 :
214 206 : resetWorkerThread();
215 :
216 206 : termPipeline();
217 412 : }
218 :
219 207 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 207 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 207 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 207 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 207 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 207 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 207 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 207 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 207 : if (playsink)
238 : {
239 206 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 206 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 207 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
247 : {
248 1 : GST_WARNING("Failed to set pipeline to READY state");
249 : }
250 207 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
251 : }
252 :
253 208 : void GstGenericPlayer::resetWorkerThread()
254 : {
255 : // Shutdown task thread
256 208 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
257 208 : m_workerThread->join();
258 208 : m_workerThread.reset();
259 : }
260 :
261 207 : void GstGenericPlayer::termPipeline()
262 : {
263 207 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
264 : {
265 0 : m_finishSourceSetupTimer->cancel();
266 : }
267 :
268 207 : m_finishSourceSetupTimer.reset();
269 :
270 256 : for (auto &elem : m_context.streamInfo)
271 : {
272 49 : StreamInfo &streamInfo = elem.second;
273 51 : for (auto &buffer : streamInfo.buffers)
274 : {
275 2 : m_gstWrapper->gstBufferUnref(buffer);
276 : }
277 :
278 49 : streamInfo.buffers.clear();
279 : }
280 :
281 207 : m_taskFactory->createStop(m_context, *this)->execute();
282 207 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
283 207 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
284 207 : m_gstWrapper->gstObjectUnref(bus);
285 :
286 207 : if (m_context.source)
287 : {
288 1 : m_gstWrapper->gstObjectUnref(m_context.source);
289 : }
290 207 : if (m_context.subtitleSink)
291 : {
292 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
293 4 : m_context.subtitleSink = nullptr;
294 : }
295 :
296 207 : if (m_context.videoSink)
297 : {
298 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
299 0 : m_context.videoSink = nullptr;
300 : }
301 :
302 : // Delete the pipeline
303 207 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
304 :
305 207 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
306 : }
307 :
308 829 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
309 : {
310 : GFlagsClass *flagsClass =
311 829 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
312 829 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
313 829 : return flag ? flag->value : 0;
314 : }
315 :
316 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
317 : {
318 1 : self->m_gstWrapper->gstObjectRef(source);
319 1 : if (self->m_workerThread)
320 : {
321 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
322 : }
323 : }
324 :
325 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
326 : {
327 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
328 1 : self->m_gstWrapper->gstObjectRef(element);
329 1 : if (self->m_workerThread)
330 : {
331 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
332 : }
333 : }
334 :
335 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
336 : {
337 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
338 1 : if (self->m_workerThread)
339 : {
340 2 : self->m_workerThread->enqueueTask(
341 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
342 : }
343 1 : }
344 :
345 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
346 : {
347 1 : if (m_workerThread)
348 : {
349 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
350 : }
351 : }
352 :
353 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
354 : {
355 1 : if (m_workerThread)
356 : {
357 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
358 : }
359 : }
360 :
361 2 : void GstGenericPlayer::allSourcesAttached()
362 : {
363 2 : if (m_workerThread)
364 : {
365 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
366 : }
367 : }
368 :
369 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
370 : {
371 1 : if (m_workerThread)
372 : {
373 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
374 : }
375 : }
376 :
377 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
378 : {
379 1 : if (m_workerThread)
380 : {
381 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
382 : }
383 : }
384 :
385 1 : void GstGenericPlayer::setPosition(std::int64_t position)
386 : {
387 1 : if (m_workerThread)
388 : {
389 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
390 : }
391 : }
392 :
393 1 : void GstGenericPlayer::setPlaybackRate(double rate)
394 : {
395 1 : if (m_workerThread)
396 : {
397 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
398 : }
399 : }
400 :
401 5 : bool GstGenericPlayer::getPosition(std::int64_t &position)
402 : {
403 : // We are on the main thread here, but m_context.pipeline can be used because it's modified only in the
404 : // GstGenericPlayer constructor and destructor. GstGenericPlayer is created/destroyed on the main thread, so we won't crash here.
405 5 : position = getPosition(m_context.pipeline);
406 5 : if (position == -1)
407 : {
408 3 : RIALTO_SERVER_LOG_WARN("Query position failed");
409 3 : return false;
410 : }
411 :
412 2 : return true;
413 : }
414 :
415 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
416 : {
417 38 : const char *kSinkName{nullptr};
418 38 : GstElement *sink{nullptr};
419 38 : switch (mediaSourceType)
420 : {
421 18 : case MediaSourceType::AUDIO:
422 18 : kSinkName = "audio-sink";
423 18 : break;
424 18 : case MediaSourceType::VIDEO:
425 18 : kSinkName = "video-sink";
426 18 : break;
427 2 : default:
428 2 : break;
429 : }
430 38 : if (!kSinkName)
431 : {
432 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
433 : }
434 : else
435 : {
436 36 : if (m_context.pipeline == nullptr)
437 : {
438 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
439 : }
440 : else
441 : {
442 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
443 : }
444 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
445 36 : if (sink)
446 : {
447 25 : GstElement *autoSink{sink};
448 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
449 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
450 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
451 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
452 :
453 : // Is this an auto-sink?...
454 25 : if (autoSink != sink)
455 : {
456 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
457 :
458 : // increase the reference count of the auto sink
459 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
460 : }
461 : }
462 : else
463 : {
464 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
465 : }
466 : }
467 38 : return sink;
468 : }
469 :
470 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
471 : {
472 1 : m_flushWatcher->setFlushed(mediaSourceType);
473 : }
474 :
475 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
476 : {
477 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
478 19 : GValue item = G_VALUE_INIT;
479 19 : gboolean done = FALSE;
480 :
481 28 : while (!done)
482 : {
483 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
484 : {
485 12 : case GST_ITERATOR_OK:
486 : {
487 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
488 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
489 :
490 12 : if (factory)
491 : {
492 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
493 12 : if (mediaSourceType == MediaSourceType::AUDIO)
494 : {
495 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
496 : }
497 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
498 : {
499 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
500 : }
501 :
502 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
503 : {
504 12 : m_glibWrapper->gValueUnset(&item);
505 12 : m_gstWrapper->gstIteratorFree(it);
506 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
507 : }
508 : }
509 :
510 0 : m_glibWrapper->gValueUnset(&item);
511 0 : break;
512 : }
513 2 : case GST_ITERATOR_RESYNC:
514 2 : m_gstWrapper->gstIteratorResync(it);
515 2 : break;
516 7 : case GST_ITERATOR_ERROR:
517 : case GST_ITERATOR_DONE:
518 7 : done = TRUE;
519 7 : break;
520 : }
521 : }
522 :
523 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
524 :
525 7 : m_glibWrapper->gValueUnset(&item);
526 7 : m_gstWrapper->gstIteratorFree(it);
527 :
528 7 : return nullptr;
529 : }
530 :
531 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
532 : {
533 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
534 3 : GValue item = G_VALUE_INIT;
535 3 : gboolean done = FALSE;
536 :
537 4 : while (!done)
538 : {
539 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
540 : {
541 2 : case GST_ITERATOR_OK:
542 : {
543 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
544 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
545 :
546 2 : if (factory)
547 : {
548 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
549 2 : if (mediaSourceType == MediaSourceType::AUDIO)
550 : {
551 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
552 : }
553 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
554 : {
555 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
556 : }
557 :
558 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
559 : {
560 2 : m_glibWrapper->gValueUnset(&item);
561 2 : m_gstWrapper->gstIteratorFree(it);
562 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
563 : }
564 : }
565 :
566 0 : m_glibWrapper->gValueUnset(&item);
567 0 : break;
568 : }
569 0 : case GST_ITERATOR_RESYNC:
570 0 : m_gstWrapper->gstIteratorResync(it);
571 0 : break;
572 1 : case GST_ITERATOR_ERROR:
573 : case GST_ITERATOR_DONE:
574 1 : done = TRUE;
575 1 : break;
576 : }
577 : }
578 :
579 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
580 :
581 1 : m_glibWrapper->gValueUnset(&item);
582 1 : m_gstWrapper->gstIteratorFree(it);
583 :
584 1 : return nullptr;
585 : }
586 :
587 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
588 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
589 : {
590 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
591 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
592 5 : if (kSource)
593 : {
594 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
595 : audioAttributes =
596 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
597 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
598 : 0, // used only in one of logs in rdk_gstreamer_utils, no
599 : // need to set this param.
600 : 0, // used only in one of logs in rdk_gstreamer_utils, no
601 : // need to set this param.
602 4 : audioConfig.codecSpecificConfig.data(),
603 : static_cast<std::uint32_t>(
604 4 : audioConfig.codecSpecificConfig.size())};
605 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
606 : {
607 2 : audioAttributes->m_codecParam = "mp4a";
608 : }
609 2 : else if (source->getMimeType() == "audio/x-eac3")
610 : {
611 1 : audioAttributes->m_codecParam = "ec-3";
612 : }
613 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
614 : {
615 1 : audioAttributes->m_codecParam = "lpcm";
616 : }
617 4 : }
618 : else
619 : {
620 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
621 : }
622 :
623 5 : return audioAttributes;
624 : }
625 :
626 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
627 : {
628 1 : if (!m_workerThread)
629 0 : return false;
630 :
631 2 : m_workerThread->enqueueTask(
632 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
633 1 : return true;
634 : }
635 :
636 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
637 : {
638 5 : bool returnValue{false};
639 5 : GstElement *sink{getSink(mediaSourceType)};
640 5 : if (sink)
641 : {
642 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
643 : {
644 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
645 2 : returnValue = true;
646 : }
647 : else
648 : {
649 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
650 : }
651 3 : m_gstWrapper->gstObjectUnref(sink);
652 : }
653 : else
654 : {
655 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
656 : }
657 :
658 5 : return returnValue;
659 : }
660 :
661 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
662 : {
663 5 : bool returnValue{false};
664 5 : GstElement *sink{getSink(mediaSourceType)};
665 5 : if (sink)
666 : {
667 3 : GstStructure *stats{nullptr};
668 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
669 3 : if (!stats)
670 : {
671 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
672 : }
673 : else
674 : {
675 : guint64 renderedFramesTmp;
676 : guint64 droppedFramesTmp;
677 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
678 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
679 : {
680 1 : renderedFrames = renderedFramesTmp;
681 1 : droppedFrames = droppedFramesTmp;
682 1 : returnValue = true;
683 : }
684 : else
685 : {
686 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
687 : GST_ELEMENT_NAME(sink));
688 : }
689 2 : m_gstWrapper->gstStructureFree(stats);
690 : }
691 3 : m_gstWrapper->gstObjectUnref(sink);
692 : }
693 :
694 5 : return returnValue;
695 : }
696 :
697 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
698 : {
699 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
700 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
701 :
702 4 : if (mediaSegment.isEncrypted())
703 : {
704 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
705 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
706 :
707 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
708 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
709 3 : mediaSegment.getInitVector().size());
710 3 : GstBuffer *subsamples{nullptr};
711 3 : if (!mediaSegment.getSubSamples().empty())
712 : {
713 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
714 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
715 : GstByteWriter writer;
716 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
717 :
718 6 : for (const auto &subSample : mediaSegment.getSubSamples())
719 : {
720 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
721 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
722 : }
723 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
724 : }
725 :
726 3 : uint32_t crypt = 0;
727 3 : uint32_t skip = 0;
728 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
729 :
730 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
731 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
732 3 : mediaSegment.getInitWithLast15(),
733 : keyId,
734 : initVector,
735 : subsamples,
736 6 : mediaSegment.getCipherMode(),
737 : crypt,
738 : skip,
739 : encryptionPatternSet,
740 6 : m_context.decryptionService};
741 :
742 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
743 : {
744 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
745 1 : if (keyId)
746 : {
747 1 : m_gstWrapper->gstBufferUnref(keyId);
748 : }
749 1 : if (initVector)
750 : {
751 1 : m_gstWrapper->gstBufferUnref(initVector);
752 : }
753 1 : if (subsamples)
754 : {
755 1 : m_gstWrapper->gstBufferUnref(subsamples);
756 : }
757 : }
758 : }
759 :
760 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
761 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
762 4 : return gstBuffer;
763 : }
764 :
765 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
766 : {
767 4 : auto elem = m_context.streamInfo.find(mediaSource);
768 4 : if (elem != m_context.streamInfo.end())
769 : {
770 2 : StreamInfo &streamInfo = elem->second;
771 2 : streamInfo.isNeedDataPending = false;
772 :
773 : // Send new NeedMediaData if we still need it
774 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
775 : {
776 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
777 : }
778 : }
779 : else
780 : {
781 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
782 : }
783 4 : }
784 :
785 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
786 : {
787 19 : auto elem = m_context.streamInfo.find(mediaType);
788 19 : if (elem != m_context.streamInfo.end())
789 : {
790 16 : StreamInfo &streamInfo = elem->second;
791 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
792 : {
793 2 : return;
794 : }
795 :
796 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
797 : {
798 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
799 : }
800 : else
801 : {
802 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
803 : }
804 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
805 : {
806 : // This needs to be done before gstAppSrcPushBuffer() is
807 : // called because it can free the memory
808 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
809 : }
810 :
811 28 : for (GstBuffer *buffer : streamInfo.buffers)
812 : {
813 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
814 : }
815 14 : streamInfo.buffers.clear();
816 14 : streamInfo.isDataPushed = true;
817 :
818 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
819 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
820 15 : [](const auto &entry) { return entry.second.isDataPushed; });
821 :
822 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
823 : {
824 1 : m_context.bufferedNotificationSent = true;
825 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
826 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
827 : }
828 14 : cancelUnderflow(mediaType);
829 :
830 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
831 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
832 : {
833 0 : setEos(mediaType);
834 : }
835 : }
836 : }
837 :
838 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
839 : {
840 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
841 7 : if (elem != m_context.streamInfo.end())
842 : {
843 6 : StreamInfo &streamInfo = elem->second;
844 :
845 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
846 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
847 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
848 :
849 6 : if (rate != kInvalidRate)
850 : {
851 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
852 : }
853 :
854 6 : if (channels != kInvalidChannels)
855 : {
856 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
857 : }
858 :
859 6 : setCodecData(newCaps, codecData);
860 :
861 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
862 : {
863 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
864 : }
865 :
866 6 : m_gstWrapper->gstCapsUnref(newCaps);
867 6 : m_gstWrapper->gstCapsUnref(currentCaps);
868 : }
869 7 : }
870 :
871 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
872 : const std::shared_ptr<CodecData> &codecData)
873 : {
874 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
875 8 : if (elem != m_context.streamInfo.end())
876 : {
877 7 : StreamInfo &streamInfo = elem->second;
878 :
879 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
880 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
881 :
882 7 : if (width > 0)
883 : {
884 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
885 : }
886 :
887 7 : if (height > 0)
888 : {
889 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
890 : }
891 :
892 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
893 : {
894 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
895 : frameRate.denominator, NULL);
896 : }
897 :
898 7 : setCodecData(newCaps, codecData);
899 :
900 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
901 : {
902 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
903 : }
904 :
905 7 : m_gstWrapper->gstCapsUnref(currentCaps);
906 7 : m_gstWrapper->gstCapsUnref(newCaps);
907 : }
908 8 : }
909 :
910 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
911 : {
912 5 : if (clippingStart || clippingEnd)
913 : {
914 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
915 : {
916 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
917 : clippingStart, clippingEnd);
918 : }
919 : else
920 : {
921 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
922 : buffer, clippingStart, clippingEnd);
923 : }
924 : }
925 5 : }
926 :
927 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
928 : {
929 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
930 : {
931 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
932 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
933 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
934 7 : m_gstWrapper->gstBufferUnref(buf);
935 7 : return true;
936 : }
937 6 : if (codecData && CodecDataType::STRING == codecData->type)
938 : {
939 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
940 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
941 2 : return true;
942 : }
943 4 : return false;
944 : }
945 :
946 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
947 : {
948 12 : auto initialPosition = m_context.initialPositions.find(source);
949 12 : if (m_context.initialPositions.end() == initialPosition)
950 : {
951 : // Sending initial sample not needed
952 7 : return;
953 : }
954 : // GstAppSrc does not replace the segment if it's the same as the previous one.
955 : // That causes problems with position reporting on amlogic devices, so we need to push
956 : // two segments with different reset time values.
957 5 : pushAdditionalSegmentIfRequired(source);
958 :
959 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
960 : {
961 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
962 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
963 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
964 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
965 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
966 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
967 : {
968 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
969 1 : m_gstWrapper->gstSegmentFree(segment);
970 1 : m_context.initialPositions.erase(initialPosition);
971 1 : return;
972 : }
973 5 : segment->applied_rate = appliedRate;
974 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
975 : "], rate: %f, appliedRate %f, reset_time: %d\n",
976 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
977 : segment->rate, segment->applied_rate, resetTime);
978 :
979 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
980 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
981 : // uses gst_buffer_copy, which loses the RialtoProtectionMeta (that causes problems with EME
982 : // for the first frame).
983 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
984 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
985 5 : m_gstWrapper->gstSampleUnref(sample);
986 5 : m_gstWrapper->gstCapsUnref(currentCaps);
987 :
988 5 : m_gstWrapper->gstSegmentFree(segment);
989 : }
990 4 : m_context.currentPosition[source] = initialPosition->second.back();
991 4 : m_context.initialPositions.erase(initialPosition);
992 4 : return;
993 : }
994 :
995 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
996 : {
997 5 : auto currentPosition = m_context.currentPosition.find(source);
998 5 : if (m_context.currentPosition.end() == currentPosition)
999 : {
1000 4 : return;
1001 : }
1002 1 : auto initialPosition = m_context.initialPositions.find(source);
1003 1 : if (m_context.initialPositions.end() == initialPosition)
1004 : {
1005 0 : return;
1006 : }
1007 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1008 1 : currentPosition->second == initialPosition->second.back())
1009 : {
1010 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1011 1 : SegmentData additionalSegment = initialPosition->second.back();
1012 1 : additionalSegment.resetTime = false;
1013 1 : initialPosition->second.push_back(additionalSegment);
1014 : }
1015 : }
1016 :
1017 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1018 : {
1019 2 : auto initialPosition = m_context.initialPositions.find(source);
1020 2 : if (m_context.initialPositions.end() == initialPosition)
1021 : {
1022 : // Sending initial sample not needed
1023 1 : return;
1024 : }
1025 :
1026 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1027 : GST_TIME_ARGS(initialPosition->second.back().position));
1028 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1029 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1030 :
1031 1 : m_context.initialPositions.erase(initialPosition);
1032 : }
1033 :
1034 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1035 : {
1036 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1037 : {
1038 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1039 1 : return false;
1040 : }
1041 6 : if (source->getMimeType().empty())
1042 : {
1043 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1044 1 : return false;
1045 : }
1046 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1047 5 : if (!audioAttributes)
1048 : {
1049 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1050 1 : return false;
1051 : }
1052 :
1053 4 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1054 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1055 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1056 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1057 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1058 : {
1059 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1060 3 : int sampleAttributes{
1061 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks whether this param != NULL.
1062 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1063 3 : unsigned int ui32Delay{0}; // output param
1064 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1065 : // currentDispPts in rdk_gstreamer_utils function stub
1066 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1067 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1068 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1069 3 : m_glibWrapper->gFree(oldCapsCStr);
1070 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1071 3 : bool svpEnabled{true}; // assume always true
1072 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1073 : bool result =
1074 3 : m_rdkGstreamerUtilsWrapper
1075 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1076 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1077 : &audioChangeStage,
1078 : &caps, // may fail for amlogic - that implementation changes
1079 : // this parameter, it's probably used by Netflix later
1080 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1081 :
1082 3 : if (!result || !retVal)
1083 : {
1084 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1085 : }
1086 : }
1087 : else
1088 : {
1089 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1090 : }
1091 :
1092 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1093 4 : if (caps)
1094 4 : m_gstWrapper->gstCapsUnref(caps);
1095 4 : if (oldCaps)
1096 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1097 :
1098 4 : return true;
1099 5 : }
1100 :
1101 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1102 : {
1103 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1104 : }
1105 :
1106 88 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1107 : {
1108 88 : if (m_workerThread)
1109 : {
1110 88 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1111 : }
1112 : }
1113 :
1114 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1115 : {
1116 1 : if (m_workerThread)
1117 : {
1118 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1119 : }
1120 : }
1121 :
1122 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1123 : {
1124 3 : if (m_workerThread)
1125 : {
1126 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1127 6 : m_workerThread->enqueueTask(
1128 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1129 : }
1130 3 : }
1131 :
1132 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1133 : {
1134 2 : if (m_workerThread)
1135 : {
1136 2 : bool underflowEnabled = m_context.isPlaying;
1137 4 : m_workerThread->enqueueTask(
1138 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1139 : }
1140 2 : }
1141 :
1142 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1143 : {
1144 1 : allSourcesAttached();
1145 : }
1146 :
1147 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1148 : {
1149 14 : auto elem = m_context.streamInfo.find(mediaSource);
1150 14 : if (elem != m_context.streamInfo.end())
1151 : {
1152 14 : StreamInfo &streamInfo = elem->second;
1153 14 : if (!streamInfo.underflowOccured)
1154 : {
1155 11 : return;
1156 : }
1157 :
1158 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1159 3 : streamInfo.underflowOccured = false;
1160 : }
1161 : }
1162 :
1163 1 : void GstGenericPlayer::play()
1164 : {
1165 1 : if (m_workerThread)
1166 : {
1167 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1168 : }
1169 : }
1170 :
1171 1 : void GstGenericPlayer::pause()
1172 : {
1173 1 : if (m_workerThread)
1174 : {
1175 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1176 : }
1177 : }
1178 :
1179 1 : void GstGenericPlayer::stop()
1180 : {
1181 1 : if (m_workerThread)
1182 : {
1183 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1184 : }
1185 : }
1186 :
1187 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1188 : {
1189 4 : if (!m_context.pipeline)
1190 : {
1191 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1192 1 : if (m_gstPlayerClient)
1193 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1194 1 : return false;
1195 : }
1196 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1197 : {
1198 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1199 1 : if (m_gstPlayerClient)
1200 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1201 1 : return false;
1202 : }
1203 2 : return true;
1204 : }
1205 :
1206 9 : int64_t GstGenericPlayer::getPosition(GstElement *element)
1207 : {
1208 9 : if (!element)
1209 : {
1210 1 : RIALTO_SERVER_LOG_WARN("Element is null");
1211 1 : return -1;
1212 : }
1213 :
1214 8 : m_gstWrapper->gstStateLock(element);
1215 :
1216 16 : if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
1217 8 : (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
1218 1 : m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
1219 : {
1220 1 : RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
1221 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
1222 : m_gstWrapper->gstElementStateChangeReturnGetName(
1223 : m_gstWrapper->gstElementGetStateReturn(element)),
1224 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));
1225 :
1226 1 : m_gstWrapper->gstStateUnlock(element);
1227 1 : return -1;
1228 : }
1229 7 : m_gstWrapper->gstStateUnlock(element);
1230 :
1231 7 : gint64 position = -1;
1232 7 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
1233 : {
1234 1 : RIALTO_SERVER_LOG_WARN("Failed to query position");
1235 1 : return -1;
1236 : }
1237 :
1238 6 : return position;
1239 : }
1240 :
1241 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1242 : {
1243 1 : if (m_workerThread)
1244 : {
1245 2 : m_workerThread->enqueueTask(
1246 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1247 : }
1248 1 : }
1249 :
1250 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1251 : {
1252 1 : if (m_workerThread)
1253 : {
1254 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1255 : }
1256 : }
1257 :
1258 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1259 : {
1260 4 : bool result = false;
1261 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1262 4 : if (videoSink)
1263 : {
1264 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1265 : {
1266 : std::string rect =
1267 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1268 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1269 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1270 2 : m_context.pendingGeometry.clear();
1271 2 : result = true;
1272 : }
1273 : else
1274 : {
1275 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1276 : }
1277 3 : m_gstWrapper->gstObjectUnref(videoSink);
1278 : }
1279 :
1280 4 : return result;
1281 : }
1282 :
1283 3 : bool GstGenericPlayer::setImmediateOutput()
1284 : {
1285 3 : bool result{false};
1286 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1287 : {
1288 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1289 3 : if (sink)
1290 : {
1291 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1292 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1293 :
1294 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1295 : {
1296 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1297 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1298 1 : result = true;
1299 : }
1300 : else
1301 : {
1302 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1303 : }
1304 2 : m_context.pendingImmediateOutputForVideo.reset();
1305 2 : m_gstWrapper->gstObjectUnref(sink);
1306 : }
1307 : else
1308 : {
1309 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1310 : }
1311 : }
1312 3 : return result;
1313 : }
1314 :
1315 4 : bool GstGenericPlayer::setShowVideoWindow()
1316 : {
1317 4 : if (!m_context.pendingShowVideoWindow.has_value())
1318 : {
1319 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1320 1 : return false;
1321 : }
1322 :
1323 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1324 3 : if (!videoSink)
1325 : {
1326 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1327 1 : return false;
1328 : }
1329 2 : bool result{false};
1330 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1331 : {
1332 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1333 1 : result = true;
1334 : }
1335 : else
1336 : {
1337 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1338 : }
1339 2 : m_context.pendingShowVideoWindow.reset();
1340 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1341 2 : return result;
1342 : }
1343 :
1344 4 : bool GstGenericPlayer::setLowLatency()
1345 : {
1346 4 : bool result{false};
1347 4 : if (m_context.pendingLowLatency.has_value())
1348 : {
1349 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1350 4 : if (sink)
1351 : {
1352 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1353 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1354 :
1355 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1356 : {
1357 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1358 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1359 2 : result = true;
1360 : }
1361 : else
1362 : {
1363 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1364 : }
1365 3 : m_context.pendingLowLatency.reset();
1366 3 : m_gstWrapper->gstObjectUnref(sink);
1367 : }
1368 : else
1369 : {
1370 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1371 : }
1372 : }
1373 4 : return result;
1374 : }
1375 :
1376 3 : bool GstGenericPlayer::setSync()
1377 : {
1378 3 : bool result{false};
1379 3 : if (m_context.pendingSync.has_value())
1380 : {
1381 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1382 3 : if (sink)
1383 : {
1384 2 : bool sync{m_context.pendingSync.value()};
1385 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1386 :
1387 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1388 : {
1389 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1390 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1391 1 : result = true;
1392 : }
1393 : else
1394 : {
1395 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1396 : }
1397 2 : m_context.pendingSync.reset();
1398 2 : m_gstWrapper->gstObjectUnref(sink);
1399 : }
1400 : else
1401 : {
1402 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1403 : }
1404 : }
1405 3 : return result;
1406 : }
1407 :
1408 3 : bool GstGenericPlayer::setSyncOff()
1409 : {
1410 3 : bool result{false};
1411 3 : if (m_context.pendingSyncOff.has_value())
1412 : {
1413 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1414 3 : if (decoder)
1415 : {
1416 2 : bool syncOff{m_context.pendingSyncOff.value()};
1417 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1418 :
1419 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1420 : {
1421 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1422 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1423 1 : result = true;
1424 : }
1425 : else
1426 : {
1427 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1428 : }
1429 2 : m_context.pendingSyncOff.reset();
1430 2 : m_gstWrapper->gstObjectUnref(decoder);
1431 : }
1432 : else
1433 : {
1434 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1435 : }
1436 : }
1437 3 : return result;
1438 : }
1439 :
1440 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1441 : {
1442 6 : bool result{false};
1443 6 : int32_t streamSyncMode{0};
1444 : {
1445 6 : std::unique_lock lock{m_context.propertyMutex};
1446 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1447 : {
1448 0 : return false;
1449 : }
1450 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1451 : }
1452 6 : if (MediaSourceType::AUDIO == type)
1453 : {
1454 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1455 3 : if (!decoder)
1456 : {
1457 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1458 1 : return false;
1459 : }
1460 :
1461 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1462 :
1463 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1464 : {
1465 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1466 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1467 1 : result = true;
1468 : }
1469 : else
1470 : {
1471 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1472 : }
1473 2 : m_gstWrapper->gstObjectUnref(decoder);
1474 2 : std::unique_lock lock{m_context.propertyMutex};
1475 2 : m_context.pendingStreamSyncMode.erase(type);
1476 : }
1477 3 : else if (MediaSourceType::VIDEO == type)
1478 : {
1479 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1480 3 : if (!parser)
1481 : {
1482 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1483 1 : return false;
1484 : }
1485 :
1486 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1487 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1488 :
1489 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1490 : {
1491 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1492 1 : result = true;
1493 : }
1494 : else
1495 : {
1496 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1497 : }
1498 2 : m_gstWrapper->gstObjectUnref(parser);
1499 2 : std::unique_lock lock{m_context.propertyMutex};
1500 2 : m_context.pendingStreamSyncMode.erase(type);
1501 : }
1502 4 : return result;
1503 : }
1504 :
1505 3 : bool GstGenericPlayer::setRenderFrame()
1506 : {
1507 3 : bool result{false};
1508 3 : if (m_context.pendingRenderFrame)
1509 : {
1510 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1511 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1512 3 : if (sink)
1513 : {
1514 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1515 : {
1516 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1517 :
1518 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1519 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1520 : false));
1521 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1522 1 : result = true;
1523 : }
1524 : else
1525 : {
1526 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1527 : }
1528 2 : m_gstWrapper->gstObjectUnref(sink);
1529 2 : m_context.pendingRenderFrame = false;
1530 : }
1531 : else
1532 : {
1533 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1534 : }
1535 : }
1536 3 : return result;
1537 : }
1538 :
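     : /**
     :  * @brief Applies a queued "limit-buffering-ms" value to the audio decoder, if one is pending. If no
     :  *        decoder is available yet the value stays queued and false is returned; once a decoder is
     :  *        present the pending value is cleared whether or not the property could be set.
     :  */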
1539 3 : bool GstGenericPlayer::setBufferingLimit()
1540 : {
1541 3 : bool result{false};
1542 3 : guint bufferingLimit{0};
1543 : {
1544 3 : std::unique_lock lock{m_context.propertyMutex};
1545 3 : if (!m_context.pendingBufferingLimit.has_value())
1546 : {
1547 0 : return false;
1548 : }
1549 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1550 : }
1551 :
1552 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1553 3 : if (decoder)
1554 : {
1555 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1556 :
1557 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1558 : {
1559 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1560 1 : result = true;
1561 : }
1562 : else
1563 : {
1564 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1565 : GST_ELEMENT_NAME(decoder));
1566 : }
1567 2 : m_gstWrapper->gstObjectUnref(decoder);
1568 2 : std::unique_lock lock{m_context.propertyMutex};
1569 2 : m_context.pendingBufferingLimit.reset();
1570 : }
1571 : else
1572 : {
1573 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1574 : }
1575 3 : return result;
1576 : }
1577 :
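     : /**
     :  * @brief Applies a queued "use-buffering" flag to the current audio decodebin, if one is pending.
     :  *        Returns true only when the flag has actually been written; otherwise the request remains
     :  *        queued until a decodebin is available.
     :  */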
1578 2 : bool GstGenericPlayer::setUseBuffering()
1579 : {
1580 2 : std::unique_lock lock{m_context.propertyMutex};
1581 2 : if (m_context.pendingUseBuffering.has_value())
1582 : {
1583 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1584 : {
1585 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1586 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1587 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1588 : useBufferingGboolean, nullptr);
1589 1 : m_context.pendingUseBuffering.reset();
1590 1 : return true;
1591 : }
1592 : else
1593 : {
1594 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1595 : }
1596 : }
1597 1 : return false;
1598 2 : }
1599 :
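     : /**
     :  * @brief Creates a westerossink with "res-usage" set to 0x0 and installs it as the pipeline's
     :  *        "video-sink", to support the secondary-video use case. The precise semantics of res-usage are
     :  *        platform specific and not documented here. Returns true when no westerossink factory exists,
     :  *        since there is then nothing to configure.
     :  */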
1600 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1601 : {
1602 8 : bool result = false;
1603 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1604 8 : if (factory)
1605 : {
1606 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1607 7 : if (videoSink)
1608 : {
1609 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1610 : {
1611 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1612 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1613 4 : result = true;
1614 : }
1615 : else
1616 : {
1617 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1618 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1619 : }
1620 : }
1621 : else
1622 : {
1623 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1624 : }
1625 :
1626 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1627 : }
1628 : else
1629 : {
1630 : // No westeros sink
1631 1 : result = true;
1632 : }
1633 :
1634 8 : return result;
1635 : }
1636 :
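     : /**
     :  * @brief Attaches an "erm" GstContext containing "res-usage" = 0x0 to the pipeline, complementing the
     :  *        westerossink configuration above for elements that read this context rather than a sink
     :  *        property (an inference; the consumer of the context is platform specific). Returns false if
     :  *        the context or its writable structure cannot be created.
     :  */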
1637 8 : bool GstGenericPlayer::setErmContext()
1638 : {
1639 8 : bool result = false;
1640 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1641 8 : if (context)
1642 : {
1643 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1644 6 : if (contextStructure)
1645 : {
1646 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1647 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1648 5 : result = true;
1649 : }
1650 : else
1651 : {
1652 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1653 : }
1654 6 : m_gstWrapper->gstContextUnref(context);
1655 : }
1656 : else
1657 : {
1658 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1659 : }
1660 :
1661 8 : return result;
1662 : }
1663 :
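     : /**
     :  * @brief Starts, if not already running, the periodic timer (kPositionReportTimerMs) whose callback
     :  *        enqueues ReportPosition and CheckAudioUnderflow tasks on the worker thread.
     :  */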
1664 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1665 : {
1666 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1667 : {
1668 1 : return;
1669 : }
1670 :
1671 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1672 : kPositionReportTimerMs,
1673 10 : [this]()
1674 : {
1675 1 : if (m_workerThread)
1676 : {
1677 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
1678 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1679 : }
1680 1 : },
1681 5 : firebolt::rialto::common::TimerType::PERIODIC);
1682 : }
1683 :
1684 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1685 : {
1686 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1687 : {
1688 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1689 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1690 : }
1691 4 : }
1692 :
1693 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
1694 : {
1695 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1696 : {
1697 0 : return;
1698 : }
1699 :
1700 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
1701 : kSubtitleClockResyncInterval,
1702 0 : [this]()
1703 : {
1704 0 : if (m_workerThread)
1705 : {
1706 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
1707 : }
1708 0 : },
1709 0 : firebolt::rialto::common::TimerType::PERIODIC);
1710 : }
1711 :
1712 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
1713 : {
1714 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1715 : {
1716 0 : m_subtitleClockResyncTimer->cancel();
1717 0 : m_subtitleClockResyncTimer.reset();
1718 : }
1719 : }
1720 :
1721 2 : void GstGenericPlayer::stopWorkerThread()
1722 : {
1723 2 : if (m_workerThread)
1724 : {
1725 2 : m_workerThread->stop();
1726 : }
1727 : }
1728 :
1729 0 : void GstGenericPlayer::setPendingPlaybackRate()
1730 : {
1731 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1732 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1733 : }
1734 :
1735 1 : void GstGenericPlayer::renderFrame()
1736 : {
1737 1 : if (m_workerThread)
1738 : {
1739 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1740 : }
1741 : }
1742 :
1743 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1744 : {
1745 18 : if (m_workerThread)
1746 : {
1747 36 : m_workerThread->enqueueTask(
1748 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1749 : }
1750 18 : }
1751 :
1752 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1753 : {
1754 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1755 : // GstGenericPlayer constructor and destructor, both of which run on the main thread, so a crash cannot occur here.
1756 3 : if (!m_context.pipeline)
1757 : {
1758 0 : return false;
1759 : }
1760 :
1761 : // NOTE: No GStreamer documentation for "fade-volume" could be found at the time this code was written.
1762 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1763 : // The code has been written to remain backwards compatible on platforms that don't have this property.
1764 : // The observed behaviour was:
1765 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1766 : //   of reading the volume from the pipeline works and is used.
1767 : // - if the returned fade volume is zero or positive then audio-fade is active. In this case the fade volume
1768 : //   directly gives the current volume level, 0=min to 100=max (and the pipeline's current volume level is
1769 : //   meaningless and does not contribute in this case).
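     : //
     : // For illustration only (hypothetical values), the mapping implemented below is:
     : //   fade-volume == -100  ->  fade inactive, currentVolume read from the pipeline's stream volume
     : //   fade-volume ==   50  ->  fade active,   currentVolume = 50 / 100.0 = 0.5
     : //   fade-volume ==  100  ->  fade active,   currentVolume = 1.0 (maximum)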
1770 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1771 5 : if (m_context.audioFadeEnabled && sink &&
1772 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1773 : {
1774 2 : gint fadeVolume{-100};
1775 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1776 2 : if (fadeVolume < 0)
1777 : {
1778 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1779 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1780 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1781 : }
1782 : else
1783 : {
1784 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1785 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1786 : }
1787 : }
1788 : else
1789 : {
1790 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1791 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1792 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1793 : }
1794 :
1795 3 : if (sink)
1796 2 : m_gstWrapper->gstObjectUnref(sink);
1797 :
1798 3 : return true;
1799 : }
1800 :
1801 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1802 : {
1803 1 : if (m_workerThread)
1804 : {
1805 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1806 : }
1807 : }
1808 :
1809 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1810 : {
1811 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1812 : // GstGenericPlayer constructor and destructor, both of which run on the main thread, so a crash cannot occur here.
1813 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1814 : {
1815 2 : if (!m_context.subtitleSink)
1816 : {
1817 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1818 1 : return false;
1819 : }
1820 1 : gboolean muteValue{FALSE};
1821 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1822 1 : mute = muteValue;
1823 : }
1824 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1825 : {
1826 2 : if (!m_context.pipeline)
1827 : {
1828 1 : return false;
1829 : }
1830 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1831 : }
1832 : else
1833 : {
1834 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1835 1 : return false;
1836 : }
1837 :
1838 2 : return true;
1839 : }
1840 :
1841 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1842 : {
1843 1 : GstElement *sink = getSink(mediaSourceType);
1844 1 : if (!sink)
1845 : {
1846 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1847 0 : return true; // Our sinks are async by default
1848 : }
1849 1 : gboolean returnValue{TRUE};
1850 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1851 1 : m_gstWrapper->gstObjectUnref(sink);
1852 1 : return returnValue == TRUE;
1853 : }
1854 :
1855 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1856 : {
1857 1 : if (m_workerThread)
1858 : {
1859 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1860 : }
1861 : }
1862 :
1863 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1864 : {
1865 3 : if (!m_context.subtitleSink)
1866 : {
1867 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1868 1 : return false;
1869 : }
1870 :
1871 2 : gchar *identifier = nullptr;
1872 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1873 :
1874 2 : if (identifier)
1875 : {
1876 1 : textTrackIdentifier = identifier;
1877 1 : m_glibWrapper->gFree(identifier);
1878 1 : return true;
1879 : }
1880 : else
1881 : {
1882 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1883 1 : return false;
1884 : }
1885 : }
1886 :
1887 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1888 : {
1889 1 : if (m_workerThread)
1890 : {
1891 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1892 : }
1893 1 : return true;
1894 : }
1895 :
1896 1 : bool GstGenericPlayer::setSync(bool sync)
1897 : {
1898 1 : if (m_workerThread)
1899 : {
1900 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1901 : }
1902 1 : return true;
1903 : }
1904 :
1905 4 : bool GstGenericPlayer::getSync(bool &sync)
1906 : {
1907 4 : bool returnValue{false};
1908 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1909 4 : if (sink)
1910 : {
1911 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1912 : {
1913 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1914 1 : returnValue = true;
1915 : }
1916 : else
1917 : {
1918 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1919 : }
1920 2 : m_gstWrapper->gstObjectUnref(sink);
1921 : }
1922 2 : else if (m_context.pendingSync.has_value())
1923 : {
1924 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1925 1 : sync = m_context.pendingSync.value();
1926 1 : returnValue = true;
1927 : }
1928 : else
1929 : {
1930 : // We don't know the default value of the sync property, so return failure here
1931 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1932 : }
1933 :
1934 4 : return returnValue;
1935 : }
1936 :
1937 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1938 : {
1939 1 : if (m_workerThread)
1940 : {
1941 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1942 : }
1943 1 : return true;
1944 : }
1945 :
1946 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1947 : {
1948 1 : if (m_workerThread)
1949 : {
1950 2 : m_workerThread->enqueueTask(
1951 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1952 : }
1953 1 : return true;
1954 : }
1955 :
1956 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1957 : {
1958 5 : bool returnValue{false};
1959 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1960 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1961 : {
1962 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1963 2 : returnValue = true;
1964 : }
1965 : else
1966 : {
1967 3 : std::unique_lock lock{m_context.propertyMutex};
1968 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1969 : {
1970 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1971 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1972 1 : returnValue = true;
1973 : }
1974 : else
1975 : {
1976 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1977 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1978 : }
1979 3 : }
1980 :
1981 5 : if (decoder)
1982 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1983 :
1984 5 : return returnValue;
1985 : }
1986 :
1987 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
1988 : {
1989 1 : if (m_workerThread)
1990 : {
1991 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
1992 : }
1993 : }
1994 :
1995 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
1996 : {
1997 1 : if (m_workerThread)
1998 : {
1999 1 : async = isAsync(mediaSourceType);
2000 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
2001 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
2002 : }
2003 : }
2004 :
2005 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2006 : double appliedRate, uint64_t stopPosition)
2007 : {
2008 1 : if (m_workerThread)
2009 : {
2010 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2011 : resetTime, appliedRate, stopPosition));
2012 : }
2013 : }
2014 :
2015 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2016 : {
2017 0 : if (m_workerThread)
2018 : {
2019 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2020 : }
2021 : }
2022 :
2023 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2024 : {
2025 1 : if (m_workerThread)
2026 : {
2027 2 : m_workerThread->enqueueTask(
2028 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2029 : }
2030 1 : }
2031 :
2032 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2033 : {
2034 1 : if (m_workerThread)
2035 : {
2036 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2037 : }
2038 : }
2039 :
2040 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2041 : {
2042 5 : bool returnValue{false};
2043 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2044 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2045 : {
2046 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2047 2 : returnValue = true;
2048 : }
2049 : else
2050 : {
2051 3 : std::unique_lock lock{m_context.propertyMutex};
2052 3 : if (m_context.pendingBufferingLimit.has_value())
2053 : {
2054 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2055 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2056 1 : returnValue = true;
2057 : }
2058 : else
2059 : {
2060 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2061 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2062 : }
2063 3 : }
2064 :
2065 5 : if (decoder)
2066 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2067 :
2068 5 : return returnValue;
2069 : }
2070 :
2071 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2072 : {
2073 1 : if (m_workerThread)
2074 : {
2075 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2076 : }
2077 : }
2078 :
2079 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2080 : {
2081 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2082 : {
2083 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2084 1 : return true;
2085 : }
2086 : else
2087 : {
2088 2 : std::unique_lock lock{m_context.propertyMutex};
2089 2 : if (m_context.pendingUseBuffering.has_value())
2090 : {
2091 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2092 1 : useBuffering = m_context.pendingUseBuffering.value();
2093 1 : return true;
2094 : }
2095 2 : }
2096 1 : return false;
2097 : }
2098 :
2099 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2100 : {
2101 1 : if (m_workerThread)
2102 : {
2103 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2104 : }
2105 : }
2106 :
2107 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2108 : {
2109 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2110 : }
2111 :
2112 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2113 : {
2114 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2115 : }
2116 :
2117 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2118 : {
2119 : // Only add children that are sinks
2120 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2121 : {
2122 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2123 :
2124 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2125 : {
2126 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2127 : }
2128 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2129 : }
2130 3 : }
2131 :
2132 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2133 : {
2134 : // Only add children that are sinks
2135 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2136 : {
2137 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2138 :
2139 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2140 : {
2141 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2142 : }
2143 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2144 : }
2145 3 : }
2146 :
2147 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2148 : {
2149 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2150 : {
2151 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2152 :
2153 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2154 : {
2155 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2156 1 : return;
2157 : }
2158 :
2159 2 : m_context.autoVideoChildSink = nullptr;
2160 : }
2161 : }
2162 :
2163 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2164 : {
2165 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2166 : {
2167 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2168 :
2169 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2170 : {
2171 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2172 1 : return;
2173 : }
2174 :
2175 2 : m_context.autoAudioChildSink = nullptr;
2176 : }
2177 : }
2178 :
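     : /**
     :  * @brief If the supplied sink is a GstAutoVideoSink, returns the child sink previously captured by
     :  *        addAutoVideoSinkChild(); otherwise, or when no child has been stored, the supplied sink is
     :  *        returned unchanged. getSinkChildIfAutoAudioSink() below is the audio equivalent.
     :  */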
2179 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2180 : {
2181 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2182 14 : if (!kTmpName)
2183 0 : return sink;
2184 :
2185 28 : const std::string kElementTypeName{kTmpName};
2186 14 : if (kElementTypeName == "GstAutoVideoSink")
2187 : {
2188 1 : if (!m_context.autoVideoChildSink)
2189 : {
2190 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2191 : }
2192 : else
2193 : {
2194 1 : return m_context.autoVideoChildSink;
2195 : }
2196 : }
2197 13 : return sink;
2198 14 : }
2199 :
2200 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2201 : {
2202 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2203 11 : if (!kTmpName)
2204 0 : return sink;
2205 :
2206 22 : const std::string kElementTypeName{kTmpName};
2207 11 : if (kElementTypeName == "GstAutoAudioSink")
2208 : {
2209 1 : if (!m_context.autoAudioChildSink)
2210 : {
2211 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2212 : }
2213 : else
2214 : {
2215 1 : return m_context.autoAudioChildSink;
2216 : }
2217 : }
2218 10 : return sink;
2219 11 : }
2220 :
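     : /**
     :  * @brief Builds the playbin "flags" value for the pipeline: video, native-video and text are always
     :  *        enabled, audio is added when requested, and native-audio only when a brcmaudiosink factory is
     :  *        found (see shouldEnableNativeAudio() below).
     :  */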
2221 207 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2222 : {
2223 207 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2224 :
2225 207 : if (enableAudio)
2226 : {
2227 207 : flags |= getGstPlayFlag("audio");
2228 207 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2229 : }
2230 :
2231 207 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2232 : }
2233 :
2234 207 : bool GstGenericPlayer::shouldEnableNativeAudio()
2235 : {
2236 207 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2237 207 : if (factory)
2238 : {
2239 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2240 1 : return true;
2241 : }
2242 206 : return false;
2243 : }
2244 :
2245 : }; // namespace firebolt::rialto::server