Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Report position interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
46 :
47 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
48 : {
49 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
50 2 : (lhs.stopPosition == rhs.stopPosition);
51 : }
52 : } // namespace
53 :
54 : namespace firebolt::rialto::server
55 : {
56 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
57 :
58 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
59 : {
60 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
61 :
62 3 : if (!factory)
63 : {
64 : try
65 : {
66 3 : factory = std::make_shared<GstGenericPlayerFactory>();
67 : }
68 0 : catch (const std::exception &e)
69 : {
70 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
71 : }
72 :
73 3 : GstGenericPlayerFactory::m_factory = factory;
74 : }
75 :
76 3 : return factory;
77 : }
78 :
79 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
80 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
81 : const VideoRequirements &videoRequirements,
82 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
83 : {
84 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
85 :
86 : try
87 : {
88 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
89 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
92 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
93 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
94 : {
95 0 : throw std::runtime_error("Cannot create GstWrapper");
96 : }
97 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
98 : {
99 0 : throw std::runtime_error("Cannot create GlibWrapper");
100 : }
101 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
102 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
103 : {
104 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
105 : }
106 : gstPlayer = std::make_unique<
107 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
108 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
109 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
110 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
111 : rdkGstreamerUtilsWrapper,
112 2 : IGstTextTrackSinkFactory::createFactory()),
113 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
114 3 : IGstProtectionMetadataHelperFactory::createFactory());
115 1 : }
116 0 : catch (const std::exception &e)
117 : {
118 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
119 : }
120 :
121 1 : return gstPlayer;
122 : }
123 :
124 216 : GstGenericPlayer::GstGenericPlayer(
125 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
126 : const VideoRequirements &videoRequirements,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
129 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
130 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
131 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
132 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
133 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
134 216 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
135 216 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
136 432 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
137 648 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
138 : {
139 216 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
140 :
141 216 : gstInitialiser.waitForInitialisation();
142 :
143 216 : m_context.decryptionService = &decryptionService;
144 :
145 216 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
146 : {
147 2 : throw std::runtime_error("Cannot create GstSrc");
148 : }
149 :
150 214 : if (!timerFactory)
151 : {
152 1 : throw std::runtime_error("TimeFactory is invalid");
153 : }
154 :
155 426 : if ((!gstProtectionMetadataFactory) ||
156 426 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
157 : {
158 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
159 : }
160 :
161 : // Ensure that rialtosrc has been initialised
162 213 : m_context.gstSrc->initSrc();
163 :
164 : // Start task thread
165 213 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
166 : {
167 0 : throw std::runtime_error("Failed to create the worker thread");
168 : }
169 :
170 : // Initialise pipeline
171 213 : switch (type)
172 : {
173 212 : case MediaType::MSE:
174 : {
175 212 : initMsePipeline();
176 212 : break;
177 : }
178 1 : default:
179 : {
180 1 : resetWorkerThread();
181 1 : throw std::runtime_error("Media type not supported");
182 : }
183 : }
184 :
185 : // Check the video requirements for a limited video.
186 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
187 : // video in a dual video scenario.
188 212 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
189 : {
190 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
191 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
192 8 : bool ermContextResult = setErmContext();
193 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
194 : {
195 1 : resetWorkerThread();
196 1 : termPipeline();
197 1 : throw std::runtime_error("Could not set secondary video");
198 : }
199 7 : }
200 : else
201 : {
202 204 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
203 : }
204 :
205 : m_gstDispatcherThread =
206 211 : gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline, m_gstWrapper);
207 296 : }
208 :
209 422 : GstGenericPlayer::~GstGenericPlayer()
210 : {
211 211 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
212 211 : m_gstDispatcherThread.reset();
213 :
214 211 : resetWorkerThread();
215 :
216 211 : termPipeline();
217 422 : }
218 :
219 212 : void GstGenericPlayer::initMsePipeline()
220 : {
221 : // Make playbin
222 212 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
223 : // Set pipeline flags
224 212 : setPlaybinFlags(true);
225 :
226 : // Set callbacks
227 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
228 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
229 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
230 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
231 :
232 : // Set uri
233 212 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
234 :
235 : // Check playsink
236 212 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
237 212 : if (playsink)
238 : {
239 211 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
240 211 : m_gstWrapper->gstObjectUnref(playsink);
241 : }
242 : else
243 : {
244 1 : GST_WARNING("No playsink ?!?!?");
245 : }
246 212 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
247 : {
248 1 : GST_WARNING("Failed to set pipeline to READY state");
249 : }
250 212 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
251 : }
252 :
253 213 : void GstGenericPlayer::resetWorkerThread()
254 : {
255 : // Shutdown task thread
256 213 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
257 213 : m_workerThread->join();
258 213 : m_workerThread.reset();
259 : }
260 :
261 212 : void GstGenericPlayer::termPipeline()
262 : {
263 212 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
264 : {
265 0 : m_finishSourceSetupTimer->cancel();
266 : }
267 :
268 212 : m_finishSourceSetupTimer.reset();
269 :
270 261 : for (auto &elem : m_context.streamInfo)
271 : {
272 49 : StreamInfo &streamInfo = elem.second;
273 51 : for (auto &buffer : streamInfo.buffers)
274 : {
275 2 : m_gstWrapper->gstBufferUnref(buffer);
276 : }
277 :
278 49 : streamInfo.buffers.clear();
279 : }
280 :
281 212 : m_taskFactory->createStop(m_context, *this)->execute();
282 212 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
283 212 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
284 212 : m_gstWrapper->gstObjectUnref(bus);
285 :
286 212 : if (m_context.source)
287 : {
288 1 : m_gstWrapper->gstObjectUnref(m_context.source);
289 : }
290 212 : if (m_context.subtitleSink)
291 : {
292 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
293 4 : m_context.subtitleSink = nullptr;
294 : }
295 :
296 212 : if (m_context.videoSink)
297 : {
298 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
299 0 : m_context.videoSink = nullptr;
300 : }
301 :
302 : // Delete the pipeline
303 212 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
304 :
305 212 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
306 : }
307 :
308 849 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
309 : {
310 : GFlagsClass *flagsClass =
311 849 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
312 849 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
313 849 : return flag ? flag->value : 0;
314 : }
315 :
316 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
317 : {
318 1 : self->m_gstWrapper->gstObjectRef(source);
319 1 : if (self->m_workerThread)
320 : {
321 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
322 : }
323 : }
324 :
325 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
326 : {
327 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
328 1 : self->m_gstWrapper->gstObjectRef(element);
329 1 : if (self->m_workerThread)
330 : {
331 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
332 : }
333 : }
334 :
335 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
336 : {
337 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
338 1 : if (self->m_workerThread)
339 : {
340 2 : self->m_workerThread->enqueueTask(
341 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
342 : }
343 1 : }
344 :
345 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
346 : {
347 1 : if (m_workerThread)
348 : {
349 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
350 : }
351 : }
352 :
353 1 : void GstGenericPlayer::removeSource(const MediaSourceType &mediaSourceType)
354 : {
355 1 : if (m_workerThread)
356 : {
357 1 : m_workerThread->enqueueTask(m_taskFactory->createRemoveSource(m_context, *this, mediaSourceType));
358 : }
359 : }
360 :
361 89 : void GstGenericPlayer::allSourcesAttached()
362 : {
363 89 : if (m_workerThread)
364 : {
365 89 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
366 : }
367 : }
368 :
369 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
370 : {
371 1 : if (m_workerThread)
372 : {
373 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
374 : }
375 : }
376 :
377 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
378 : {
379 1 : if (m_workerThread)
380 : {
381 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
382 : }
383 : }
384 :
385 1 : void GstGenericPlayer::setPosition(std::int64_t position)
386 : {
387 1 : if (m_workerThread)
388 : {
389 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
390 : }
391 : }
392 :
393 1 : void GstGenericPlayer::setPlaybackRate(double rate)
394 : {
395 1 : if (m_workerThread)
396 : {
397 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
398 : }
399 : }
400 :
401 6 : bool GstGenericPlayer::getPosition(std::int64_t &position)
402 : {
403 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
404 : // GstGenericPlayer constructor and destructor. GstGenericPlayer is created/destroyed on the main thread, so there is no crash risk here.
405 6 : position = getPosition(m_context.pipeline);
406 6 : if (position == -1)
407 : {
408 4 : RIALTO_SERVER_LOG_WARN("Query position failed");
409 4 : return false;
410 : }
411 :
412 2 : return true;
413 : }
414 :
415 38 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
416 : {
417 38 : const char *kSinkName{nullptr};
418 38 : GstElement *sink{nullptr};
419 38 : switch (mediaSourceType)
420 : {
421 18 : case MediaSourceType::AUDIO:
422 18 : kSinkName = "audio-sink";
423 18 : break;
424 18 : case MediaSourceType::VIDEO:
425 18 : kSinkName = "video-sink";
426 18 : break;
427 2 : default:
428 2 : break;
429 : }
430 38 : if (!kSinkName)
431 : {
432 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
433 : }
434 : else
435 : {
436 36 : if (m_context.pipeline == nullptr)
437 : {
438 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
439 : }
440 : else
441 : {
442 36 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
443 : }
444 36 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
445 36 : if (sink)
446 : {
447 25 : GstElement *autoSink{sink};
448 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
449 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
450 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
451 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
452 :
453 : // Is this an auto-sink?...
454 25 : if (autoSink != sink)
455 : {
456 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
457 :
458 : // increase the reference count of the auto sink
459 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
460 : }
461 : }
462 : else
463 : {
464 11 : RIALTO_SERVER_LOG_WARN("%s could not be obtained", kSinkName);
465 : }
466 : }
467 38 : return sink;
468 : }
469 :
470 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
471 : {
472 1 : m_flushWatcher->setFlushed(mediaSourceType);
473 : }
474 :
475 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
476 : {
477 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
478 19 : GValue item = G_VALUE_INIT;
479 19 : gboolean done = FALSE;
480 :
481 28 : while (!done)
482 : {
483 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
484 : {
485 12 : case GST_ITERATOR_OK:
486 : {
487 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
488 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
489 :
490 12 : if (factory)
491 : {
492 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
493 12 : if (mediaSourceType == MediaSourceType::AUDIO)
494 : {
495 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
496 : }
497 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
498 : {
499 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
500 : }
501 :
502 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
503 : {
504 12 : m_glibWrapper->gValueUnset(&item);
505 12 : m_gstWrapper->gstIteratorFree(it);
506 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
507 : }
508 : }
509 :
510 0 : m_glibWrapper->gValueUnset(&item);
511 0 : break;
512 : }
513 2 : case GST_ITERATOR_RESYNC:
514 2 : m_gstWrapper->gstIteratorResync(it);
515 2 : break;
516 7 : case GST_ITERATOR_ERROR:
517 : case GST_ITERATOR_DONE:
518 7 : done = TRUE;
519 7 : break;
520 : }
521 : }
522 :
523 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
524 :
525 7 : m_glibWrapper->gValueUnset(&item);
526 7 : m_gstWrapper->gstIteratorFree(it);
527 :
528 7 : return nullptr;
529 : }
530 :
531 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
532 : {
533 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
534 3 : GValue item = G_VALUE_INIT;
535 3 : gboolean done = FALSE;
536 :
537 4 : while (!done)
538 : {
539 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
540 : {
541 2 : case GST_ITERATOR_OK:
542 : {
543 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
544 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
545 :
546 2 : if (factory)
547 : {
548 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
549 2 : if (mediaSourceType == MediaSourceType::AUDIO)
550 : {
551 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
552 : }
553 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
554 : {
555 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
556 : }
557 :
558 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
559 : {
560 2 : m_glibWrapper->gValueUnset(&item);
561 2 : m_gstWrapper->gstIteratorFree(it);
562 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
563 : }
564 : }
565 :
566 0 : m_glibWrapper->gValueUnset(&item);
567 0 : break;
568 : }
569 0 : case GST_ITERATOR_RESYNC:
570 0 : m_gstWrapper->gstIteratorResync(it);
571 0 : break;
572 1 : case GST_ITERATOR_ERROR:
573 : case GST_ITERATOR_DONE:
574 1 : done = TRUE;
575 1 : break;
576 : }
577 : }
578 :
579 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
580 :
581 1 : m_glibWrapper->gValueUnset(&item);
582 1 : m_gstWrapper->gstIteratorFree(it);
583 :
584 1 : return nullptr;
585 : }
586 :
587 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
588 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
589 : {
590 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
591 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
592 5 : if (kSource)
593 : {
594 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
595 : audioAttributes =
596 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
597 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
598 : 0, // used only in one of the logs in rdk_gstreamer_utils; no
599 : // need to set this param.
600 : 0, // used only in one of the logs in rdk_gstreamer_utils; no
601 : // need to set this param.
602 4 : audioConfig.codecSpecificConfig.data(),
603 : static_cast<std::uint32_t>(
604 4 : audioConfig.codecSpecificConfig.size())};
605 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
606 : {
607 2 : audioAttributes->m_codecParam = "mp4a";
608 : }
609 2 : else if (source->getMimeType() == "audio/x-eac3")
610 : {
611 1 : audioAttributes->m_codecParam = "ec-3";
612 : }
613 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
614 : {
615 1 : audioAttributes->m_codecParam = "lpcm";
616 : }
617 4 : }
618 : else
619 : {
620 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
621 : }
622 :
623 5 : return audioAttributes;
624 : }
625 :
626 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
627 : {
628 1 : if (!m_workerThread)
629 0 : return false;
630 :
631 2 : m_workerThread->enqueueTask(
632 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
633 1 : return true;
634 : }
635 :
636 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
637 : {
638 5 : bool returnValue{false};
639 5 : GstElement *sink{getSink(mediaSourceType)};
640 5 : if (sink)
641 : {
642 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
643 : {
644 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
645 2 : returnValue = true;
646 : }
647 : else
648 : {
649 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
650 : }
651 3 : m_gstWrapper->gstObjectUnref(sink);
652 : }
653 : else
654 : {
655 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
656 : }
657 :
658 5 : return returnValue;
659 : }
660 :
661 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
662 : {
663 5 : bool returnValue{false};
664 5 : GstElement *sink{getSink(mediaSourceType)};
665 5 : if (sink)
666 : {
667 3 : GstStructure *stats{nullptr};
668 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
669 3 : if (!stats)
670 : {
671 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
672 : }
673 : else
674 : {
675 : guint64 renderedFramesTmp;
676 : guint64 droppedFramesTmp;
677 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
678 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
679 : {
680 1 : renderedFrames = renderedFramesTmp;
681 1 : droppedFrames = droppedFramesTmp;
682 1 : returnValue = true;
683 : }
684 : else
685 : {
686 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
687 : GST_ELEMENT_NAME(sink));
688 : }
689 2 : m_gstWrapper->gstStructureFree(stats);
690 : }
691 3 : m_gstWrapper->gstObjectUnref(sink);
692 : }
693 :
694 5 : return returnValue;
695 : }
696 :
697 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
698 : {
699 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
700 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
701 :
702 4 : if (mediaSegment.isEncrypted())
703 : {
704 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
705 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
706 :
707 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
708 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
709 3 : mediaSegment.getInitVector().size());
710 3 : GstBuffer *subsamples{nullptr};
711 3 : if (!mediaSegment.getSubSamples().empty())
712 : {
713 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
714 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
715 : GstByteWriter writer;
716 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
717 :
718 6 : for (const auto &subSample : mediaSegment.getSubSamples())
719 : {
720 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
721 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
722 : }
723 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
724 : }
725 :
726 3 : uint32_t crypt = 0;
727 3 : uint32_t skip = 0;
728 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
729 :
730 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
731 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
732 3 : mediaSegment.getInitWithLast15(),
733 : keyId,
734 : initVector,
735 : subsamples,
736 6 : mediaSegment.getCipherMode(),
737 : crypt,
738 : skip,
739 : encryptionPatternSet,
740 6 : m_context.decryptionService};
741 :
742 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
743 : {
744 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
745 1 : if (keyId)
746 : {
747 1 : m_gstWrapper->gstBufferUnref(keyId);
748 : }
749 1 : if (initVector)
750 : {
751 1 : m_gstWrapper->gstBufferUnref(initVector);
752 : }
753 1 : if (subsamples)
754 : {
755 1 : m_gstWrapper->gstBufferUnref(subsamples);
756 : }
757 : }
758 : }
759 :
760 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
761 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
762 4 : return gstBuffer;
763 : }
764 :
765 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
766 : {
767 4 : auto elem = m_context.streamInfo.find(mediaSource);
768 4 : if (elem != m_context.streamInfo.end())
769 : {
770 2 : StreamInfo &streamInfo = elem->second;
771 2 : streamInfo.isNeedDataPending = false;
772 :
773 : // Send new NeedMediaData if we still need it
774 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
775 : {
776 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
777 : }
778 : }
779 : else
780 : {
781 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
782 : }
783 4 : }
784 :
785 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
786 : {
787 19 : auto elem = m_context.streamInfo.find(mediaType);
788 19 : if (elem != m_context.streamInfo.end())
789 : {
790 16 : StreamInfo &streamInfo = elem->second;
791 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
792 : {
793 2 : return;
794 : }
795 :
796 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
797 : {
798 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
799 : }
800 : else
801 : {
802 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
803 : }
804 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
805 : {
806 : // This needs to be done before gstAppSrcPushBuffer() is
807 : // called because it can free the memory
808 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
809 : }
810 :
811 28 : for (GstBuffer *buffer : streamInfo.buffers)
812 : {
813 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
814 : }
815 14 : streamInfo.buffers.clear();
816 14 : streamInfo.isDataPushed = true;
817 :
818 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
819 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
820 15 : [](const auto &entry) { return entry.second.isDataPushed; });
821 :
822 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
823 : {
824 1 : m_context.bufferedNotificationSent = true;
825 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
826 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
827 : }
828 14 : cancelUnderflow(mediaType);
829 :
830 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
831 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
832 : {
833 0 : setEos(mediaType);
834 : }
835 : }
836 : }
837 :
838 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
839 : {
840 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
841 7 : if (elem != m_context.streamInfo.end())
842 : {
843 6 : StreamInfo &streamInfo = elem->second;
844 :
845 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
846 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
847 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
848 :
849 6 : if (rate != kInvalidRate)
850 : {
851 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
852 : }
853 :
854 6 : if (channels != kInvalidChannels)
855 : {
856 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
857 : }
858 :
859 6 : setCodecData(newCaps, codecData);
860 :
861 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
862 : {
863 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
864 : }
865 :
866 6 : m_gstWrapper->gstCapsUnref(newCaps);
867 6 : m_gstWrapper->gstCapsUnref(currentCaps);
868 : }
869 7 : }
870 :
871 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
872 : const std::shared_ptr<CodecData> &codecData)
873 : {
874 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
875 8 : if (elem != m_context.streamInfo.end())
876 : {
877 7 : StreamInfo &streamInfo = elem->second;
878 :
879 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
880 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
881 :
882 7 : if (width > 0)
883 : {
884 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
885 : }
886 :
887 7 : if (height > 0)
888 : {
889 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
890 : }
891 :
892 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
893 : {
894 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
895 : frameRate.denominator, NULL);
896 : }
897 :
898 7 : setCodecData(newCaps, codecData);
899 :
900 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
901 : {
902 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
903 : }
904 :
905 7 : m_gstWrapper->gstCapsUnref(currentCaps);
906 7 : m_gstWrapper->gstCapsUnref(newCaps);
907 : }
908 8 : }
909 :
910 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
911 : {
912 5 : if (clippingStart || clippingEnd)
913 : {
914 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
915 : {
916 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
917 : clippingStart, clippingEnd);
918 : }
919 : else
920 : {
921 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
922 : buffer, clippingStart, clippingEnd);
923 : }
924 : }
925 5 : }
926 :
927 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
928 : {
929 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
930 : {
931 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
932 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
933 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
934 7 : m_gstWrapper->gstBufferUnref(buf);
935 7 : return true;
936 : }
937 6 : if (codecData && CodecDataType::STRING == codecData->type)
938 : {
939 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
940 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
941 2 : return true;
942 : }
943 4 : return false;
944 : }
945 :
946 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
947 : {
948 12 : auto initialPosition = m_context.initialPositions.find(source);
949 12 : if (m_context.initialPositions.end() == initialPosition)
950 : {
951 : // Sending initial sample not needed
952 7 : return;
953 : }
954 : // GstAppSrc does not replace a segment if it is the same as the previous one.
955 : // That causes position reporting problems on amlogic devices, so we need to push
956 : // two segments with different reset-time values.
957 5 : pushAdditionalSegmentIfRequired(source);
958 :
959 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
960 : {
961 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
962 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
963 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
964 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
965 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
966 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
967 : {
968 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
969 1 : m_gstWrapper->gstSegmentFree(segment);
970 1 : m_context.initialPositions.erase(initialPosition);
971 1 : return;
972 : }
973 5 : segment->applied_rate = appliedRate;
974 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
975 : "], rate: %f, appliedRate %f, reset_time: %d\n",
976 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
977 : segment->rate, segment->applied_rate, resetTime);
978 :
979 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
980 : // We can't pass a buffer in the GstSample, because the implementation of gst_app_src_push_sample
981 : // uses gst_buffer_copy, which loses RialtoProtectionMeta (and that causes EME problems
982 : // for the first frame).
983 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
984 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
985 5 : m_gstWrapper->gstSampleUnref(sample);
986 5 : m_gstWrapper->gstCapsUnref(currentCaps);
987 :
988 5 : m_gstWrapper->gstSegmentFree(segment);
989 : }
990 4 : m_context.currentPosition[source] = initialPosition->second.back();
991 4 : m_context.initialPositions.erase(initialPosition);
992 4 : return;
993 : }
994 :
995 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
996 : {
997 5 : auto currentPosition = m_context.currentPosition.find(source);
998 5 : if (m_context.currentPosition.end() == currentPosition)
999 : {
1000 4 : return;
1001 : }
1002 1 : auto initialPosition = m_context.initialPositions.find(source);
1003 1 : if (m_context.initialPositions.end() == initialPosition)
1004 : {
1005 0 : return;
1006 : }
1007 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1008 1 : currentPosition->second == initialPosition->second.back())
1009 : {
1010 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1011 1 : SegmentData additionalSegment = initialPosition->second.back();
1012 1 : additionalSegment.resetTime = false;
1013 1 : initialPosition->second.push_back(additionalSegment);
1014 : }
1015 : }
1016 :
1017 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1018 : {
1019 2 : auto initialPosition = m_context.initialPositions.find(source);
1020 2 : if (m_context.initialPositions.end() == initialPosition)
1021 : {
1022 : // Sending initial sample not needed
1023 1 : return;
1024 : }
1025 :
1026 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1027 : GST_TIME_ARGS(initialPosition->second.back().position));
1028 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1029 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1030 :
1031 1 : m_context.initialPositions.erase(initialPosition);
1032 : }
1033 :
1034 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1035 : {
1036 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1037 : {
1038 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1039 1 : return false;
1040 : }
1041 6 : if (source->getMimeType().empty())
1042 : {
1043 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1044 1 : return false;
1045 : }
1046 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1047 5 : if (!audioAttributes)
1048 : {
1049 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1050 1 : return false;
1051 : }
1052 :
1053 4 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1054 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1055 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1056 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1057 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1058 : {
1059 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1060 3 : int sampleAttributes{
1061 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch only checks that this param != NULL.
1062 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1063 3 : unsigned int ui32Delay{0}; // output param
1064 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1065 : // currentDispPts in rdk_gstreamer_utils function stub
1066 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1067 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1068 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1069 3 : m_glibWrapper->gFree(oldCapsCStr);
1070 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1071 3 : bool svpEnabled{true}; // assume always true
1072 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1073 : bool result =
1074 3 : m_rdkGstreamerUtilsWrapper
1075 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1076 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1077 : &audioChangeStage,
1078 : &caps, // may fail for amlogic - that implementation changes
1079 : // this parameter, it's probably used by Netflix later
1080 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1081 :
1082 3 : if (!result || !retVal)
1083 : {
1084 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1085 : }
1086 : }
1087 : else
1088 : {
1089 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1090 : }
1091 :
1092 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1093 4 : if (caps)
1094 4 : m_gstWrapper->gstCapsUnref(caps);
1095 4 : if (oldCaps)
1096 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1097 :
1098 4 : return true;
1099 5 : }
1100 :
1101 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1102 : {
1103 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1104 : }
1105 :
1106 7 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1107 : {
1108 7 : if (m_workerThread)
1109 : {
1110 7 : if (m_scheduledNeedDatas.isNeedDataScheduled(src))
1111 : {
1112 1 : return;
1113 : }
1114 6 : m_scheduledNeedDatas.setNeedDataScheduled(src);
1115 6 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1116 : }
1117 : }
1118 :
1119 2 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1120 : {
1121 2 : if (m_workerThread)
1122 : {
1123 2 : clearNeedDataScheduled(src);
1124 2 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1125 : }
1126 : }
1127 :
1128 3 : void GstGenericPlayer::scheduleAudioUnderflow()
1129 : {
1130 3 : if (m_workerThread)
1131 : {
1132 3 : bool underflowEnabled = m_context.isPlaying && !m_context.audioSourceRemoved;
1133 6 : m_workerThread->enqueueTask(
1134 6 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1135 : }
1136 3 : }
1137 :
1138 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1139 : {
1140 2 : if (m_workerThread)
1141 : {
1142 2 : bool underflowEnabled = m_context.isPlaying;
1143 4 : m_workerThread->enqueueTask(
1144 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1145 : }
1146 2 : }
1147 :
1148 88 : void GstGenericPlayer::scheduleAllSourcesAttached()
1149 : {
1150 88 : allSourcesAttached();
1151 : }
1152 :
1153 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1154 : {
1155 14 : auto elem = m_context.streamInfo.find(mediaSource);
1156 14 : if (elem != m_context.streamInfo.end())
1157 : {
1158 14 : StreamInfo &streamInfo = elem->second;
1159 14 : if (!streamInfo.underflowOccured)
1160 : {
1161 11 : return;
1162 : }
1163 :
1164 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1165 3 : streamInfo.underflowOccured = false;
1166 : }
1167 : }
1168 :
1169 1 : void GstGenericPlayer::play()
1170 : {
1171 1 : if (m_workerThread)
1172 : {
1173 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1174 : }
1175 : }
1176 :
1177 1 : void GstGenericPlayer::pause()
1178 : {
1179 1 : if (m_workerThread)
1180 : {
1181 1 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1182 : }
1183 : }
1184 :
1185 1 : void GstGenericPlayer::stop()
1186 : {
1187 1 : if (m_workerThread)
1188 : {
1189 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1190 : }
1191 : }
1192 :
1193 4 : bool GstGenericPlayer::changePipelineState(GstState newState)
1194 : {
1195 4 : if (!m_context.pipeline)
1196 : {
1197 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1198 1 : if (m_gstPlayerClient)
1199 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1200 1 : return false;
1201 : }
1202 3 : if (m_gstWrapper->gstElementSetState(m_context.pipeline, newState) == GST_STATE_CHANGE_FAILURE)
1203 : {
1204 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1205 1 : if (m_gstPlayerClient)
1206 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1207 1 : return false;
1208 : }
1209 2 : return true;
1210 : }
1211 :
1212 10 : int64_t GstGenericPlayer::getPosition(GstElement *element)
1213 : {
1214 10 : if (!element)
1215 : {
1216 1 : RIALTO_SERVER_LOG_WARN("Element is null");
1217 1 : return -1;
1218 : }
1219 :
1220 9 : if (m_flushWatcher->isFlushOngoing())
1221 : {
1222 1 : RIALTO_SERVER_LOG_WARN("Can't get position while flush is ongoing");
1223 1 : return -1;
1224 : }
1225 :
1226 8 : m_gstWrapper->gstStateLock(element);
1227 :
1228 8 : const auto kElementState{m_gstWrapper->gstElementGetState(element)};
1229 8 : const auto kStateChangeReturn{m_gstWrapper->gstElementGetStateReturn(element)};
1230 8 : const auto kNextState{m_gstWrapper->gstElementGetStateNext(element)};
1231 8 : if (kElementState < GST_STATE_PAUSED ||
1232 1 : (kStateChangeReturn == GST_STATE_CHANGE_ASYNC && kNextState == GST_STATE_PAUSED))
1233 : {
1234 1 : RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
1235 : m_gstWrapper->gstElementStateGetName(kElementState),
1236 : m_gstWrapper->gstElementStateChangeReturnGetName(kStateChangeReturn),
1237 : m_gstWrapper->gstElementStateGetName(kNextState));
1238 :
1239 1 : m_gstWrapper->gstStateUnlock(element);
1240 1 : return -1;
1241 : }
1242 7 : m_gstWrapper->gstStateUnlock(element);
1243 :
1244 7 : gint64 position = -1;
1245 7 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
1246 : {
1247 1 : RIALTO_SERVER_LOG_WARN("Failed to query position");
1248 1 : return -1;
1249 : }
1250 :
1251 6 : return position;
1252 : }
1253 :
1254 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1255 : {
1256 1 : if (m_workerThread)
1257 : {
1258 2 : m_workerThread->enqueueTask(
1259 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1260 : }
1261 1 : }
1262 :
1263 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1264 : {
1265 1 : if (m_workerThread)
1266 : {
1267 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1268 : }
1269 : }
1270 :
1271 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1272 : {
1273 4 : bool result = false;
1274 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1275 4 : if (videoSink)
1276 : {
1277 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1278 : {
1279 : std::string rect =
1280 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1281 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1282 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1283 2 : m_context.pendingGeometry.clear();
1284 2 : result = true;
1285 : }
1286 : else
1287 : {
1288 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1289 : }
1290 3 : m_gstWrapper->gstObjectUnref(videoSink);
1291 : }
1292 :
1293 4 : return result;
1294 : }
1295 :
1296 3 : bool GstGenericPlayer::setImmediateOutput()
1297 : {
1298 3 : bool result{false};
1299 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1300 : {
1301 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1302 3 : if (sink)
1303 : {
1304 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1305 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1306 :
1307 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1308 : {
1309 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1310 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1311 1 : result = true;
1312 : }
1313 : else
1314 : {
1315 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1316 : }
1317 2 : m_context.pendingImmediateOutputForVideo.reset();
1318 2 : m_gstWrapper->gstObjectUnref(sink);
1319 : }
1320 : else
1321 : {
1322 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1323 : }
1324 : }
1325 3 : return result;
1326 : }
1327 :
1328 4 : bool GstGenericPlayer::setShowVideoWindow()
1329 : {
1330 4 : if (!m_context.pendingShowVideoWindow.has_value())
1331 : {
1332 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1333 1 : return false;
1334 : }
1335 :
1336 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1337 3 : if (!videoSink)
1338 : {
1339 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1340 1 : return false;
1341 : }
1342 2 : bool result{false};
1343 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1344 : {
1345 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1346 1 : result = true;
1347 : }
1348 : else
1349 : {
1350 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1351 : }
1352 2 : m_context.pendingShowVideoWindow.reset();
1353 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1354 2 : return result;
1355 : }
1356 :
1357 4 : bool GstGenericPlayer::setLowLatency()
1358 : {
1359 4 : bool result{false};
1360 4 : if (m_context.pendingLowLatency.has_value())
1361 : {
1362 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1363 4 : if (sink)
1364 : {
1365 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1366 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1367 :
1368 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1369 : {
1370 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1371 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1372 2 : result = true;
1373 : }
1374 : else
1375 : {
1376 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1377 : }
1378 3 : m_context.pendingLowLatency.reset();
1379 3 : m_gstWrapper->gstObjectUnref(sink);
1380 : }
1381 : else
1382 : {
1383 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1384 : }
1385 : }
1386 4 : return result;
1387 : }
1388 :
1389 3 : bool GstGenericPlayer::setSync()
1390 : {
1391 3 : bool result{false};
1392 3 : if (m_context.pendingSync.has_value())
1393 : {
1394 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1395 3 : if (sink)
1396 : {
1397 2 : bool sync{m_context.pendingSync.value()};
1398 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1399 :
1400 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1401 : {
1402 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1403 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1404 1 : result = true;
1405 : }
1406 : else
1407 : {
1408 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1409 : }
1410 2 : m_context.pendingSync.reset();
1411 2 : m_gstWrapper->gstObjectUnref(sink);
1412 : }
1413 : else
1414 : {
1415 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1416 : }
1417 : }
1418 3 : return result;
1419 : }
1420 :
1421 3 : bool GstGenericPlayer::setSyncOff()
1422 : {
1423 3 : bool result{false};
1424 3 : if (m_context.pendingSyncOff.has_value())
1425 : {
1426 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1427 3 : if (decoder)
1428 : {
1429 2 : bool syncOff{m_context.pendingSyncOff.value()};
1430 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1431 :
1432 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1433 : {
1434 1 : gboolean syncOffGboolean{decoder ? TRUE : FALSE};
1435 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1436 1 : result = true;
1437 : }
1438 : else
1439 : {
1440 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1441 : }
1442 2 : m_context.pendingSyncOff.reset();
1443 2 : m_gstWrapper->gstObjectUnref(decoder);
1444 : }
1445 : else
1446 : {
1447 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1448 : }
1449 : }
1450 3 : return result;
1451 : }
1452 :
1453 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1454 : {
1455 6 : bool result{false};
1456 6 : int32_t streamSyncMode{0};
1457 : {
1458 6 : std::unique_lock lock{m_context.propertyMutex};
1459 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1460 : {
1461 0 : return false;
1462 : }
1463 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1464 : }
1465 6 : if (MediaSourceType::AUDIO == type)
1466 : {
1467 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1468 3 : if (!decoder)
1469 : {
1470 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1471 1 : return false;
1472 : }
1473 :
1474 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1475 :
1476 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1477 : {
1478 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1479 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1480 1 : result = true;
1481 : }
1482 : else
1483 : {
1484 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1485 : }
1486 2 : m_gstWrapper->gstObjectUnref(decoder);
1487 2 : std::unique_lock lock{m_context.propertyMutex};
1488 2 : m_context.pendingStreamSyncMode.erase(type);
1489 : }
1490 3 : else if (MediaSourceType::VIDEO == type)
1491 : {
1492 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1493 3 : if (!parser)
1494 : {
1495 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1496 1 : return false;
1497 : }
1498 :
1499 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1500 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1501 :
1502 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1503 : {
1504 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1505 1 : result = true;
1506 : }
1507 : else
1508 : {
1509 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1510 : }
1511 2 : m_gstWrapper->gstObjectUnref(parser);
1512 2 : std::unique_lock lock{m_context.propertyMutex};
1513 2 : m_context.pendingStreamSyncMode.erase(type);
1514 : }
1515 4 : return result;
1516 : }
1517 :
1518 3 : bool GstGenericPlayer::setRenderFrame()
1519 : {
1520 3 : bool result{false};
1521 3 : if (m_context.pendingRenderFrame)
1522 : {
1523 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1524 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1525 3 : if (sink)
1526 : {
1527 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1528 : {
1529 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1530 :
1531 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1532 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1533 : false));
1534 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1535 1 : result = true;
1536 : }
1537 : else
1538 : {
1539 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1540 : }
1541 2 : m_gstWrapper->gstObjectUnref(sink);
1542 2 : m_context.pendingRenderFrame = false;
1543 : }
1544 : else
1545 : {
1546 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1547 : }
1548 : }
1549 3 : return result;
1550 : }
1551 :
1552 3 : bool GstGenericPlayer::setBufferingLimit()
1553 : {
1554 3 : bool result{false};
1555 3 : guint bufferingLimit{0};
1556 : {
1557 3 : std::unique_lock lock{m_context.propertyMutex};
1558 3 : if (!m_context.pendingBufferingLimit.has_value())
1559 : {
1560 0 : return false;
1561 : }
1562 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1563 : }
1564 :
1565 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1566 3 : if (decoder)
1567 : {
1568 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1569 :
1570 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1571 : {
1572 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1573 1 : result = true;
1574 : }
1575 : else
1576 : {
1577 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1578 : GST_ELEMENT_NAME(decoder));
1579 : }
1580 2 : m_gstWrapper->gstObjectUnref(decoder);
1581 2 : std::unique_lock lock{m_context.propertyMutex};
1582 2 : m_context.pendingBufferingLimit.reset();
1583 : }
1584 : else
1585 : {
1586 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1587 : }
1588 3 : return result;
1589 : }
1590 :
1591 2 : bool GstGenericPlayer::setUseBuffering()
1592 : {
1593 2 : std::unique_lock lock{m_context.propertyMutex};
1594 2 : if (m_context.pendingUseBuffering.has_value())
1595 : {
1596 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1597 : {
1598 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1599 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1600 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1601 : useBufferingGboolean, nullptr);
1602 1 : m_context.pendingUseBuffering.reset();
1603 1 : return true;
1604 : }
1605 : else
1606 : {
1607 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1608 : }
1609 : }
1610 1 : return false;
1611 2 : }
1612 :
1613 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1614 : {
1615 8 : bool result = false;
1616 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1617 8 : if (factory)
1618 : {
1619 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1620 7 : if (videoSink)
1621 : {
1622 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1623 : {
1624 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1625 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1626 4 : result = true;
1627 : }
1628 : else
1629 : {
1630 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1631 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1632 : }
1633 : }
1634 : else
1635 : {
1636 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1637 : }
1638 :
1639 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1640 : }
1641 : else
1642 : {
1643 : // No westeros sink
1644 1 : result = true;
1645 : }
1646 :
1647 8 : return result;
1648 : }
1649 :
1650 8 : bool GstGenericPlayer::setErmContext()
1651 : {
1652 8 : bool result = false;
1653 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1654 8 : if (context)
1655 : {
1656 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1657 6 : if (contextStructure)
1658 : {
1659 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1660 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1661 5 : result = true;
1662 : }
1663 : else
1664 : {
1665 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1666 : }
1667 6 : m_gstWrapper->gstContextUnref(context);
1668 : }
1669 : else
1670 : {
1671 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1672 : }
1673 :
1674 8 : return result;
1675 : }
1676 :
1677 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1678 : {
1679 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1680 : {
1681 1 : return;
1682 : }
1683 :
1684 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1685 : kPositionReportTimerMs,
1686 10 : [this]()
1687 : {
1688 1 : if (m_workerThread)
1689 : {
1690 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
1691 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1692 : }
1693 1 : },
1694 5 : firebolt::rialto::common::TimerType::PERIODIC);
1695 : }
1696 :
1697 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1698 : {
1699 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1700 : {
1701 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1702 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1703 : }
1704 4 : }
1705 :
1706 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
1707 : {
1708 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1709 : {
1710 0 : return;
1711 : }
1712 :
1713 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
1714 : kSubtitleClockResyncInterval,
1715 0 : [this]()
1716 : {
1717 0 : if (m_workerThread)
1718 : {
1719 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
1720 : }
1721 0 : },
1722 0 : firebolt::rialto::common::TimerType::PERIODIC);
1723 : }
1724 :
1725 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
1726 : {
1727 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1728 : {
1729 0 : m_subtitleClockResyncTimer->cancel();
1730 0 : m_subtitleClockResyncTimer.reset();
1731 : }
1732 : }
1733 :
1734 2 : void GstGenericPlayer::stopWorkerThread()
1735 : {
1736 2 : if (m_workerThread)
1737 : {
1738 2 : m_workerThread->stop();
1739 : }
1740 : }
1741 :
1742 0 : void GstGenericPlayer::setPendingPlaybackRate()
1743 : {
1744 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1745 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1746 : }
1747 :
1748 1 : void GstGenericPlayer::renderFrame()
1749 : {
1750 1 : if (m_workerThread)
1751 : {
1752 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1753 : }
1754 : }
1755 :
1756 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1757 : {
1758 18 : if (m_workerThread)
1759 : {
1760 36 : m_workerThread->enqueueTask(
1761 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1762 : }
1763 18 : }
1764 :
1765 3 : bool GstGenericPlayer::getVolume(double &currentVolume)
1766 : {
1767 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1768 : // GstGenericPlayer constructor and destructor, both of which also run on the main thread.
1769 3 : if (!m_context.pipeline)
1770 : {
1771 0 : return false;
1772 : }
1773 :
1774 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1775 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1776 : // The code has been written to be backward compatible with platforms that don't have this property.
1777 : // The observed behaviour was:
1778 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1779 : // to find the volume in the pipeline works and is used.
1780 : // - if the returned fade volume is zero or positive then audio-fade is active. In this case the returned fade
1781 : // volume directly represents the current volume level, 0=min to 100=max (and the pipeline's current volume
1782 : // level is meaningless and doesn't contribute in this case).
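     : // For example (illustrative values only, not taken from a real platform): a fade-volume of 35 would map to a
     : // currentVolume of 0.35, whereas a fade-volume of -100 means no fade is active and the volume is instead read
     : // from the pipeline via gstStreamVolumeGetVolume() in GST_STREAM_VOLUME_FORMAT_LINEAR, as in the code below.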
1783 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1784 5 : if (m_context.audioFadeEnabled && sink &&
1785 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1786 : {
1787 2 : gint fadeVolume{-100};
1788 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, NULL);
1789 2 : if (fadeVolume < 0)
1790 : {
1791 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1792 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1793 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1794 : }
1795 : else
1796 : {
1797 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1798 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1799 : }
1800 : }
1801 : else
1802 : {
1803 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1804 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1805 1 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1806 : }
1807 :
1808 3 : if (sink)
1809 2 : m_gstWrapper->gstObjectUnref(sink);
1810 :
1811 3 : return true;
1812 : }
1813 :
1814 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1815 : {
1816 1 : if (m_workerThread)
1817 : {
1818 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1819 : }
1820 : }
1821 :
1822 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1823 : {
1824 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1825 : // GstGenericPlayer constructor and destructor, both of which also run on the main thread.
1826 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1827 : {
1828 2 : if (!m_context.subtitleSink)
1829 : {
1830 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1831 1 : return false;
1832 : }
1833 1 : gboolean muteValue{FALSE};
1834 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1835 1 : mute = muteValue;
1836 : }
1837 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1838 : {
1839 2 : if (!m_context.pipeline)
1840 : {
1841 1 : return false;
1842 : }
1843 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1844 : }
1845 : else
1846 : {
1847 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1848 1 : return false;
1849 : }
1850 :
1851 2 : return true;
1852 : }
1853 :
1854 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1855 : {
1856 1 : GstElement *sink = getSink(mediaSourceType);
1857 1 : if (!sink)
1858 : {
1859 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1860 0 : return true; // Our sinks are async by default
1861 : }
1862 1 : gboolean returnValue{TRUE};
1863 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1864 1 : m_gstWrapper->gstObjectUnref(sink);
1865 1 : return returnValue == TRUE;
1866 : }
1867 :
1868 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1869 : {
1870 1 : if (m_workerThread)
1871 : {
1872 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1873 : }
1874 : }
1875 :
1876 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1877 : {
1878 3 : if (!m_context.subtitleSink)
1879 : {
1880 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1881 1 : return false;
1882 : }
1883 :
1884 2 : gchar *identifier = nullptr;
1885 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1886 :
1887 2 : if (identifier)
1888 : {
1889 1 : textTrackIdentifier = identifier;
1890 1 : m_glibWrapper->gFree(identifier);
1891 1 : return true;
1892 : }
1893 : else
1894 : {
1895 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1896 1 : return false;
1897 : }
1898 : }
1899 :
1900 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1901 : {
1902 1 : if (m_workerThread)
1903 : {
1904 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1905 : }
1906 1 : return true;
1907 : }
1908 :
1909 1 : bool GstGenericPlayer::setSync(bool sync)
1910 : {
1911 1 : if (m_workerThread)
1912 : {
1913 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1914 : }
1915 1 : return true;
1916 : }
1917 :
1918 4 : bool GstGenericPlayer::getSync(bool &sync)
1919 : {
1920 4 : bool returnValue{false};
1921 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1922 4 : if (sink)
1923 : {
1924 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1925 : {
1926 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1927 1 : returnValue = true;
1928 : }
1929 : else
1930 : {
1931 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1932 : }
1933 2 : m_gstWrapper->gstObjectUnref(sink);
1934 : }
1935 2 : else if (m_context.pendingSync.has_value())
1936 : {
1937 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1938 1 : sync = m_context.pendingSync.value();
1939 1 : returnValue = true;
1940 : }
1941 : else
1942 : {
1943 : // We don't know the sink's default "sync" setting, so return failure here
1944 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached and no queued value");
1945 : }
1946 :
1947 4 : return returnValue;
1948 : }
1949 :
1950 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1951 : {
1952 1 : if (m_workerThread)
1953 : {
1954 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1955 : }
1956 1 : return true;
1957 : }
1958 :
1959 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1960 : {
1961 1 : if (m_workerThread)
1962 : {
1963 2 : m_workerThread->enqueueTask(
1964 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
1965 : }
1966 1 : return true;
1967 : }
1968 :
1969 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
1970 : {
1971 5 : bool returnValue{false};
1972 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1973 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1974 : {
1975 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
1976 2 : returnValue = true;
1977 : }
1978 : else
1979 : {
1980 3 : std::unique_lock lock{m_context.propertyMutex};
1981 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
1982 : {
1983 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1984 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
1985 1 : returnValue = true;
1986 : }
1987 : else
1988 : {
1989 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
1990 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
1991 : }
1992 3 : }
1993 :
1994 5 : if (decoder)
1995 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
1996 :
1997 5 : return returnValue;
1998 : }
1999 :
2000 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2001 : {
2002 1 : if (m_workerThread)
2003 : {
2004 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2005 : }
2006 : }
2007 :
2008 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2009 : {
2010 1 : if (m_workerThread)
2011 : {
2012 1 : async = isAsync(mediaSourceType);
2013 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
2014 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime));
2015 : }
2016 : }
2017 :
2018 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2019 : double appliedRate, uint64_t stopPosition)
2020 : {
2021 1 : if (m_workerThread)
2022 : {
2023 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, *this, mediaSourceType, position,
2024 : resetTime, appliedRate, stopPosition));
2025 : }
2026 : }
2027 :
2028 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2029 : {
2030 0 : if (m_workerThread)
2031 : {
2032 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2033 : }
2034 : }
2035 :
2036 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2037 : {
2038 1 : if (m_workerThread)
2039 : {
2040 2 : m_workerThread->enqueueTask(
2041 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2042 : }
2043 1 : }
2044 :
2045 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2046 : {
2047 1 : if (m_workerThread)
2048 : {
2049 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2050 : }
2051 : }
2052 :
2053 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2054 : {
2055 5 : bool returnValue{false};
2056 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2057 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2058 : {
2059 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2060 2 : returnValue = true;
2061 : }
2062 : else
2063 : {
2064 3 : std::unique_lock lock{m_context.propertyMutex};
2065 3 : if (m_context.pendingBufferingLimit.has_value())
2066 : {
2067 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2068 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2069 1 : returnValue = true;
2070 : }
2071 : else
2072 : {
2073 2 : RIALTO_SERVER_LOG_ERROR("buffering limit not supported in decoder '%s'",
2074 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2075 : }
2076 3 : }
2077 :
2078 5 : if (decoder)
2079 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2080 :
2081 5 : return returnValue;
2082 : }
2083 :
2084 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2085 : {
2086 1 : if (m_workerThread)
2087 : {
2088 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2089 : }
2090 : }
2091 :
2092 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2093 : {
2094 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2095 : {
2096 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2097 1 : return true;
2098 : }
2099 : else
2100 : {
2101 2 : std::unique_lock lock{m_context.propertyMutex};
2102 2 : if (m_context.pendingUseBuffering.has_value())
2103 : {
2104 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2105 1 : useBuffering = m_context.pendingUseBuffering.value();
2106 1 : return true;
2107 : }
2108 2 : }
2109 1 : return false;
2110 : }
2111 :
2112 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2113 : {
2114 1 : if (m_workerThread)
2115 : {
2116 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2117 : }
2118 : }
2119 :
2120 2 : void GstGenericPlayer::handleBusMessage(GstMessage *message, bool priority)
2121 : {
2122 2 : if (priority)
2123 : {
2124 2 : m_workerThread->enqueuePriorityTask(
2125 2 : m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2126 : }
2127 : else
2128 : {
2129 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2130 : }
2131 2 : }
2132 :
2133 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2134 : {
2135 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2136 : }
2137 :
2138 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2139 : {
2140 : // Only add children that are sinks
2141 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2142 : {
2143 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2144 :
2145 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2146 : {
2147 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is been overwritten");
2148 : }
2149 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2150 : }
2151 3 : }
2152 :
2153 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2154 : {
2155 : // Only add children that are sinks
2156 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2157 : {
2158 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2159 :
2160 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2161 : {
2162 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is been overwritten");
2163 : }
2164 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2165 : }
2166 3 : }
2167 :
2168 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2169 : {
2170 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2171 : {
2172 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2173 :
2174 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2175 : {
2176 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2177 1 : return;
2178 : }
2179 :
2180 2 : m_context.autoVideoChildSink = nullptr;
2181 : }
2182 : }
2183 :
2184 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2185 : {
2186 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2187 : {
2188 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2189 :
2190 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2191 : {
2192 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2193 1 : return;
2194 : }
2195 :
2196 2 : m_context.autoAudioChildSink = nullptr;
2197 : }
2198 : }
2199 :
2200 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2201 : {
2202 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2203 14 : if (!kTmpName)
2204 0 : return sink;
2205 :
2206 28 : const std::string kElementTypeName{kTmpName};
2207 14 : if (kElementTypeName == "GstAutoVideoSink")
2208 : {
2209 1 : if (!m_context.autoVideoChildSink)
2210 : {
2211 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2212 : }
2213 : else
2214 : {
2215 1 : return m_context.autoVideoChildSink;
2216 : }
2217 : }
2218 13 : return sink;
2219 14 : }
2220 :
2221 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2222 : {
2223 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2224 11 : if (!kTmpName)
2225 0 : return sink;
2226 :
2227 22 : const std::string kElementTypeName{kTmpName};
2228 11 : if (kElementTypeName == "GstAutoAudioSink")
2229 : {
2230 1 : if (!m_context.autoAudioChildSink)
2231 : {
2232 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2233 : }
2234 : else
2235 : {
2236 1 : return m_context.autoAudioChildSink;
2237 : }
2238 : }
2239 10 : return sink;
2240 11 : }
2241 :
2242 212 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2243 : {
2244 212 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2245 :
2246 212 : if (enableAudio)
2247 : {
2248 212 : flags |= getGstPlayFlag("audio");
2249 212 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2250 : }
2251 :
2252 212 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2253 : }
2254 :
2255 212 : bool GstGenericPlayer::shouldEnableNativeAudio()
2256 : {
2257 212 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2258 212 : if (factory)
2259 : {
2260 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2261 1 : return true;
2262 : }
2263 211 : return false;
2264 : }
2265 :
2266 3 : void GstGenericPlayer::clearNeedDataScheduled(GstAppSrc *src)
2267 : {
2268 3 : m_scheduledNeedDatas.clearNeedDataScheduled(src);
2269 : }
2270 : }; // namespace firebolt::rialto::server