Line data Source code
1 : /*
2 : * If not stated otherwise in this file or this component's LICENSE file the
3 : * following copyright and licenses apply:
4 : *
5 : * Copyright 2022 Sky UK
6 : *
7 : * Licensed under the Apache License, Version 2.0 (the "License");
8 : * you may not use this file except in compliance with the License.
9 : * You may obtain a copy of the License at
10 : *
11 : * http://www.apache.org/licenses/LICENSE-2.0
12 : *
13 : * Unless required by applicable law or agreed to in writing, software
14 : * distributed under the License is distributed on an "AS IS" BASIS,
15 : * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 : * See the License for the specific language governing permissions and
17 : * limitations under the License.
18 : */
19 :
20 : #include <chrono>
21 : #include <cinttypes>
22 : #include <stdexcept>
23 :
24 : #include "FlushWatcher.h"
25 : #include "GstDispatcherThread.h"
26 : #include "GstGenericPlayer.h"
27 : #include "GstProtectionMetadata.h"
28 : #include "IGstTextTrackSinkFactory.h"
29 : #include "IMediaPipeline.h"
30 : #include "ITimer.h"
31 : #include "RialtoServerLogging.h"
32 : #include "TypeConverters.h"
33 : #include "Utils.h"
34 : #include "WorkerThread.h"
35 : #include "tasks/generic/GenericPlayerTaskFactory.h"
36 :
37 : namespace
38 : {
39 : /**
40 : * @brief Position reporting interval in ms.
41 : * The position reporting timer should be started whenever the PLAYING state is entered and stopped
42 : * whenever the session moves to another playback state.
43 : */
44 : constexpr std::chrono::milliseconds kPositionReportTimerMs{250};
45 : constexpr std::chrono::seconds kSubtitleClockResyncInterval{10};
46 :
47 1 : bool operator==(const firebolt::rialto::server::SegmentData &lhs, const firebolt::rialto::server::SegmentData &rhs)
48 : {
49 2 : return (lhs.position == rhs.position) && (lhs.resetTime == rhs.resetTime) && (lhs.appliedRate == rhs.appliedRate) &&
50 2 : (lhs.stopPosition == rhs.stopPosition);
51 : }
52 : } // namespace
53 :
54 : namespace firebolt::rialto::server
55 : {
56 : std::weak_ptr<IGstGenericPlayerFactory> GstGenericPlayerFactory::m_factory;
57 :
58 3 : std::shared_ptr<IGstGenericPlayerFactory> IGstGenericPlayerFactory::getFactory()
59 : {
60 3 : std::shared_ptr<IGstGenericPlayerFactory> factory = GstGenericPlayerFactory::m_factory.lock();
61 :
62 3 : if (!factory)
63 : {
64 : try
65 : {
66 3 : factory = std::make_shared<GstGenericPlayerFactory>();
67 : }
68 0 : catch (const std::exception &e)
69 : {
70 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player factory, reason: %s", e.what());
71 : }
72 :
73 3 : GstGenericPlayerFactory::m_factory = factory;
74 : }
75 :
76 3 : return factory;
77 : }
78 :
79 1 : std::unique_ptr<IGstGenericPlayer> GstGenericPlayerFactory::createGstGenericPlayer(
80 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
81 : const VideoRequirements &videoRequirements,
82 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapperFactory> &rdkGstreamerUtilsWrapperFactory)
83 : {
84 1 : std::unique_ptr<IGstGenericPlayer> gstPlayer;
85 :
86 : try
87 : {
88 1 : auto gstWrapperFactory = firebolt::rialto::wrappers::IGstWrapperFactory::getFactory();
89 1 : auto glibWrapperFactory = firebolt::rialto::wrappers::IGlibWrapperFactory::getFactory();
90 1 : std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> gstWrapper;
91 1 : std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> glibWrapper;
92 1 : std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> rdkGstreamerUtilsWrapper;
93 1 : if ((!gstWrapperFactory) || (!(gstWrapper = gstWrapperFactory->getGstWrapper())))
94 : {
95 0 : throw std::runtime_error("Cannot create GstWrapper");
96 : }
97 1 : if ((!glibWrapperFactory) || (!(glibWrapper = glibWrapperFactory->getGlibWrapper())))
98 : {
99 0 : throw std::runtime_error("Cannot create GlibWrapper");
100 : }
101 2 : if ((!rdkGstreamerUtilsWrapperFactory) ||
102 2 : (!(rdkGstreamerUtilsWrapper = rdkGstreamerUtilsWrapperFactory->createRdkGstreamerUtilsWrapper())))
103 : {
104 0 : throw std::runtime_error("Cannot create RdkGstreamerUtilsWrapper");
105 : }
106 : gstPlayer = std::make_unique<
107 2 : GstGenericPlayer>(client, decryptionService, type, videoRequirements, gstWrapper, glibWrapper,
108 2 : rdkGstreamerUtilsWrapper, IGstInitialiser::instance(), std::make_unique<FlushWatcher>(),
109 2 : IGstSrcFactory::getFactory(), common::ITimerFactory::getFactory(),
110 2 : std::make_unique<GenericPlayerTaskFactory>(client, gstWrapper, glibWrapper,
111 : rdkGstreamerUtilsWrapper,
112 2 : IGstTextTrackSinkFactory::createFactory()),
113 2 : std::make_unique<WorkerThreadFactory>(), std::make_unique<GstDispatcherThreadFactory>(),
114 3 : IGstProtectionMetadataHelperFactory::createFactory());
115 1 : }
116 0 : catch (const std::exception &e)
117 : {
118 0 : RIALTO_SERVER_LOG_ERROR("Failed to create the gstreamer player, reason: %s", e.what());
119 : }
120 :
121 1 : return gstPlayer;
122 : }
123 :
124 216 : GstGenericPlayer::GstGenericPlayer(
125 : IGstGenericPlayerClient *client, IDecryptionService &decryptionService, MediaType type,
126 : const VideoRequirements &videoRequirements,
127 : const std::shared_ptr<firebolt::rialto::wrappers::IGstWrapper> &gstWrapper,
128 : const std::shared_ptr<firebolt::rialto::wrappers::IGlibWrapper> &glibWrapper,
129 : const std::shared_ptr<firebolt::rialto::wrappers::IRdkGstreamerUtilsWrapper> &rdkGstreamerUtilsWrapper,
130 : const IGstInitialiser &gstInitialiser, std::unique_ptr<IFlushWatcher> &&flushWatcher,
131 : const std::shared_ptr<IGstSrcFactory> &gstSrcFactory, std::shared_ptr<common::ITimerFactory> timerFactory,
132 : std::unique_ptr<IGenericPlayerTaskFactory> taskFactory, std::unique_ptr<IWorkerThreadFactory> workerThreadFactory,
133 : std::unique_ptr<IGstDispatcherThreadFactory> gstDispatcherThreadFactory,
134 216 : std::shared_ptr<IGstProtectionMetadataHelperFactory> gstProtectionMetadataFactory)
135 216 : : m_gstPlayerClient(client), m_gstWrapper{gstWrapper}, m_glibWrapper{glibWrapper},
136 432 : m_rdkGstreamerUtilsWrapper{rdkGstreamerUtilsWrapper}, m_timerFactory{timerFactory},
137 648 : m_taskFactory{std::move(taskFactory)}, m_flushWatcher{std::move(flushWatcher)}
138 : {
139 216 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is constructed.");
140 :
141 216 : gstInitialiser.waitForInitialisation();
142 :
143 216 : m_context.decryptionService = &decryptionService;
144 :
145 216 : if ((!gstSrcFactory) || (!(m_context.gstSrc = gstSrcFactory->getGstSrc())))
146 : {
147 2 : throw std::runtime_error("Cannot create GstSrc");
148 : }
149 :
150 214 : if (!timerFactory)
151 : {
152 1 : throw std::runtime_error("TimeFactory is invalid");
153 : }
154 :
155 426 : if ((!gstProtectionMetadataFactory) ||
156 426 : (!(m_protectionMetadataWrapper = gstProtectionMetadataFactory->createProtectionMetadataWrapper(m_gstWrapper))))
157 : {
158 0 : throw std::runtime_error("Cannot create protection metadata wrapper");
159 : }
160 :
161 : // Ensure that rialtosrc has been initialised
162 213 : m_context.gstSrc->initSrc();
163 :
164 : // Start task thread
165 213 : if ((!workerThreadFactory) || (!(m_workerThread = workerThreadFactory->createWorkerThread())))
166 : {
167 0 : throw std::runtime_error("Failed to create the worker thread");
168 : }
169 :
170 : // Initialise pipeline
171 213 : switch (type)
172 : {
173 212 : case MediaType::MSE:
174 : {
175 212 : initMsePipeline();
176 212 : break;
177 : }
178 1 : default:
179 : {
180 1 : resetWorkerThread();
181 1 : throw std::runtime_error("Media type not supported");
182 : }
183 : }
184 :
185 : // Check the video requirements for a limited video.
186 : // If the video requirements are set to anything lower than the minimum, this playback is assumed to be a secondary
187 : // video in a dual video scenario.
188 212 : if ((kMinPrimaryVideoWidth > videoRequirements.maxWidth) || (kMinPrimaryVideoHeight > videoRequirements.maxHeight))
189 : {
190 8 : RIALTO_SERVER_LOG_MIL("Secondary video playback selected");
191 8 : bool westerossinkSecondaryVideoResult = setWesterossinkSecondaryVideo();
192 8 : bool ermContextResult = setErmContext();
193 8 : if (!westerossinkSecondaryVideoResult && !ermContextResult)
194 : {
195 1 : resetWorkerThread();
196 1 : termPipeline();
197 1 : throw std::runtime_error("Could not set secondary video");
198 : }
199 7 : }
200 : else
201 : {
202 204 : RIALTO_SERVER_LOG_MIL("Primary video playback selected");
203 : }
204 :
205 422 : m_gstDispatcherThread = gstDispatcherThreadFactory->createGstDispatcherThread(*this, m_context.pipeline,
206 211 : m_context.flushOnPrerollController,
207 211 : m_gstWrapper);
208 296 : }
209 :
210 422 : GstGenericPlayer::~GstGenericPlayer()
211 : {
212 211 : RIALTO_SERVER_LOG_DEBUG("GstGenericPlayer is destructed.");
213 211 : m_gstDispatcherThread.reset();
214 :
215 211 : resetWorkerThread();
216 :
217 211 : termPipeline();
218 422 : }
219 :
220 212 : void GstGenericPlayer::initMsePipeline()
221 : {
222 : // Make playbin
223 212 : m_context.pipeline = m_gstWrapper->gstElementFactoryMake("playbin", "media_pipeline");
224 : // Set pipeline flags
225 212 : setPlaybinFlags(true);
226 :
227 : // Set callbacks
228 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "source-setup", G_CALLBACK(&GstGenericPlayer::setupSource), this);
229 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "element-setup", G_CALLBACK(&GstGenericPlayer::setupElement), this);
230 212 : m_glibWrapper->gSignalConnect(m_context.pipeline, "deep-element-added",
231 : G_CALLBACK(&GstGenericPlayer::deepElementAdded), this);
232 :
233 : // Set uri
234 212 : m_glibWrapper->gObjectSet(m_context.pipeline, "uri", "rialto://", nullptr);
235 :
236 : // Check playsink
237 212 : GstElement *playsink = (m_gstWrapper->gstBinGetByName(GST_BIN(m_context.pipeline), "playsink"));
238 212 : if (playsink)
239 : {
240 211 : m_glibWrapper->gObjectSet(G_OBJECT(playsink), "send-event-mode", 0, nullptr);
241 211 : m_gstWrapper->gstObjectUnref(playsink);
242 : }
243 : else
244 : {
245 1 : GST_WARNING("No playsink ?!?!?");
246 : }
247 212 : if (GST_STATE_CHANGE_FAILURE == m_gstWrapper->gstElementSetState(m_context.pipeline, GST_STATE_READY))
248 : {
249 1 : GST_WARNING("Failed to set pipeline to READY state");
250 : }
251 212 : RIALTO_SERVER_LOG_MIL("New RialtoServer's pipeline created");
252 : }
253 :
254 213 : void GstGenericPlayer::resetWorkerThread()
255 : {
256 : // Shutdown task thread
257 213 : m_workerThread->enqueueTask(m_taskFactory->createShutdown(*this));
258 213 : m_workerThread->join();
259 213 : m_workerThread.reset();
260 : }
261 :
262 212 : void GstGenericPlayer::termPipeline()
263 : {
264 212 : if (m_finishSourceSetupTimer && m_finishSourceSetupTimer->isActive())
265 : {
266 0 : m_finishSourceSetupTimer->cancel();
267 : }
268 :
269 212 : m_finishSourceSetupTimer.reset();
270 :
271 261 : for (auto &elem : m_context.streamInfo)
272 : {
273 49 : StreamInfo &streamInfo = elem.second;
274 51 : for (auto &buffer : streamInfo.buffers)
275 : {
276 2 : m_gstWrapper->gstBufferUnref(buffer);
277 : }
278 :
279 49 : streamInfo.buffers.clear();
280 : }
281 :
282 212 : m_taskFactory->createStop(m_context, *this)->execute();
283 212 : GstBus *bus = m_gstWrapper->gstPipelineGetBus(GST_PIPELINE(m_context.pipeline));
284 212 : m_gstWrapper->gstBusSetSyncHandler(bus, nullptr, nullptr, nullptr);
285 212 : m_gstWrapper->gstObjectUnref(bus);
286 :
287 212 : if (m_context.source)
288 : {
289 1 : m_gstWrapper->gstObjectUnref(m_context.source);
290 : }
291 212 : if (m_context.subtitleSink)
292 : {
293 4 : m_gstWrapper->gstObjectUnref(m_context.subtitleSink);
294 4 : m_context.subtitleSink = nullptr;
295 : }
296 :
297 212 : if (m_context.videoSink)
298 : {
299 0 : m_gstWrapper->gstObjectUnref(m_context.videoSink);
300 0 : m_context.videoSink = nullptr;
301 : }
302 :
303 : // Delete the pipeline
304 212 : m_gstWrapper->gstObjectUnref(m_context.pipeline);
305 :
306 212 : RIALTO_SERVER_LOG_MIL("RialtoServer's pipeline terminated");
307 : }
308 :
309 849 : unsigned GstGenericPlayer::getGstPlayFlag(const char *nick)
310 : {
311 : GFlagsClass *flagsClass =
312 849 : static_cast<GFlagsClass *>(m_glibWrapper->gTypeClassRef(m_glibWrapper->gTypeFromName("GstPlayFlags")));
313 849 : GFlagsValue *flag = m_glibWrapper->gFlagsGetValueByNick(flagsClass, nick);
314 849 : return flag ? flag->value : 0;
315 : }
316 :
317 1 : void GstGenericPlayer::setupSource(GstElement *pipeline, GstElement *source, GstGenericPlayer *self)
318 : {
319 1 : self->m_gstWrapper->gstObjectRef(source);
320 1 : if (self->m_workerThread)
321 : {
322 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupSource(self->m_context, *self, source));
323 : }
324 : }
325 :
326 1 : void GstGenericPlayer::setupElement(GstElement *pipeline, GstElement *element, GstGenericPlayer *self)
327 : {
328 1 : RIALTO_SERVER_LOG_DEBUG("Element %s added to the pipeline", GST_ELEMENT_NAME(element));
329 1 : self->m_gstWrapper->gstObjectRef(element);
330 1 : if (self->m_workerThread)
331 : {
332 1 : self->m_workerThread->enqueueTask(self->m_taskFactory->createSetupElement(self->m_context, *self, element));
333 : }
334 : }
335 :
336 1 : void GstGenericPlayer::deepElementAdded(GstBin *pipeline, GstBin *bin, GstElement *element, GstGenericPlayer *self)
337 : {
338 1 : RIALTO_SERVER_LOG_DEBUG("Deep element %s added to the pipeline", GST_ELEMENT_NAME(element));
339 1 : if (self->m_workerThread)
340 : {
341 2 : self->m_workerThread->enqueueTask(
342 2 : self->m_taskFactory->createDeepElementAdded(self->m_context, *self, pipeline, bin, element));
343 : }
344 1 : }
345 :
346 1 : void GstGenericPlayer::attachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &attachedSource)
347 : {
348 1 : if (m_workerThread)
349 : {
350 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSource(m_context, *this, attachedSource));
351 : }
352 : }
353 :
354 2 : void GstGenericPlayer::allSourcesAttached()
355 : {
356 2 : if (m_workerThread)
357 : {
358 2 : m_workerThread->enqueueTask(m_taskFactory->createFinishSetupSource(m_context, *this));
359 : }
360 : }
361 :
362 1 : void GstGenericPlayer::attachSamples(const IMediaPipeline::MediaSegmentVector &mediaSegments)
363 : {
364 1 : if (m_workerThread)
365 : {
366 1 : m_workerThread->enqueueTask(m_taskFactory->createAttachSamples(m_context, *this, mediaSegments));
367 : }
368 : }
369 :
370 1 : void GstGenericPlayer::attachSamples(const std::shared_ptr<IDataReader> &dataReader)
371 : {
372 1 : if (m_workerThread)
373 : {
374 1 : m_workerThread->enqueueTask(m_taskFactory->createReadShmDataAndAttachSamples(m_context, *this, dataReader));
375 : }
376 : }
377 :
378 1 : void GstGenericPlayer::setPosition(std::int64_t position)
379 : {
380 1 : if (m_workerThread)
381 : {
382 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPosition(m_context, *this, position));
383 : }
384 : }
385 :
386 1 : void GstGenericPlayer::setPlaybackRate(double rate)
387 : {
388 1 : if (m_workerThread)
389 : {
390 1 : m_workerThread->enqueueTask(m_taskFactory->createSetPlaybackRate(m_context, rate));
391 : }
392 : }
393 :
394 11 : bool GstGenericPlayer::getPosition(std::int64_t &position)
395 : {
396 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
397 : // GstGenericPlayer constructor and destructor, both of which run on the main thread, so we won't crash here.
398 11 : position = getPosition(m_context.pipeline);
399 11 : if (position == -1)
400 : {
401 3 : return false;
402 : }
403 :
404 8 : return true;
405 : }
406 :
407 44 : GstElement *GstGenericPlayer::getSink(const MediaSourceType &mediaSourceType) const
408 : {
409 44 : const char *kSinkName{nullptr};
410 44 : GstElement *sink{nullptr};
411 44 : switch (mediaSourceType)
412 : {
413 24 : case MediaSourceType::AUDIO:
414 24 : kSinkName = "audio-sink";
415 24 : break;
416 18 : case MediaSourceType::VIDEO:
417 18 : kSinkName = "video-sink";
418 18 : break;
419 2 : default:
420 2 : break;
421 : }
422 44 : if (!kSinkName)
423 : {
424 2 : RIALTO_SERVER_LOG_WARN("mediaSourceType not supported %d", static_cast<int>(mediaSourceType));
425 : }
426 : else
427 : {
428 42 : if (m_context.pipeline == nullptr)
429 : {
430 0 : RIALTO_SERVER_LOG_WARN("Pipeline is NULL!");
431 : }
432 : else
433 : {
434 42 : RIALTO_SERVER_LOG_DEBUG("Pipeline is valid: %p", m_context.pipeline);
435 : }
436 42 : m_glibWrapper->gObjectGet(m_context.pipeline, kSinkName, &sink, nullptr);
437 42 : if (sink)
438 : {
439 25 : GstElement *autoSink{sink};
440 25 : if (firebolt::rialto::MediaSourceType::VIDEO == mediaSourceType)
441 14 : autoSink = getSinkChildIfAutoVideoSink(sink);
442 11 : else if (firebolt::rialto::MediaSourceType::AUDIO == mediaSourceType)
443 11 : autoSink = getSinkChildIfAutoAudioSink(sink);
444 :
445 : // Is this an auto-sink?...
446 25 : if (autoSink != sink)
447 : {
448 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(sink));
449 :
450 : // increase the reference count of the auto sink
451 2 : sink = GST_ELEMENT(m_gstWrapper->gstObjectRef(GST_OBJECT(autoSink)));
452 : }
453 : }
454 : }
455 44 : return sink;
456 : }
457 :
458 1 : void GstGenericPlayer::setSourceFlushed(const MediaSourceType &mediaSourceType)
459 : {
460 1 : m_flushWatcher->setFlushed(mediaSourceType);
461 : }
462 :
463 6 : void GstGenericPlayer::notifyPlaybackInfo()
464 : {
465 6 : PlaybackInfo info;
466 6 : getPosition(info.currentPosition);
467 6 : getVolume(info.volume);
468 6 : m_gstPlayerClient->notifyPlaybackInfo(info);
469 : }
470 :
471 19 : GstElement *GstGenericPlayer::getDecoder(const MediaSourceType &mediaSourceType)
472 : {
473 19 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
474 19 : GValue item = G_VALUE_INIT;
475 19 : gboolean done = FALSE;
476 :
477 28 : while (!done)
478 : {
479 21 : switch (m_gstWrapper->gstIteratorNext(it, &item))
480 : {
481 12 : case GST_ITERATOR_OK:
482 : {
483 12 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
484 12 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
485 :
486 12 : if (factory)
487 : {
488 12 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_DECODER;
489 12 : if (mediaSourceType == MediaSourceType::AUDIO)
490 : {
491 12 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
492 : }
493 0 : else if (mediaSourceType == MediaSourceType::VIDEO)
494 : {
495 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
496 : }
497 :
498 12 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
499 : {
500 12 : m_glibWrapper->gValueUnset(&item);
501 12 : m_gstWrapper->gstIteratorFree(it);
502 12 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
503 : }
504 : }
505 :
506 0 : m_glibWrapper->gValueUnset(&item);
507 0 : break;
508 : }
509 2 : case GST_ITERATOR_RESYNC:
510 2 : m_gstWrapper->gstIteratorResync(it);
511 2 : break;
512 7 : case GST_ITERATOR_ERROR:
513 : case GST_ITERATOR_DONE:
514 7 : done = TRUE;
515 7 : break;
516 : }
517 : }
518 :
519 7 : RIALTO_SERVER_LOG_WARN("Could not find decoder");
520 :
521 7 : m_glibWrapper->gValueUnset(&item);
522 7 : m_gstWrapper->gstIteratorFree(it);
523 :
524 7 : return nullptr;
525 : }
526 :
527 3 : GstElement *GstGenericPlayer::getParser(const MediaSourceType &mediaSourceType)
528 : {
529 3 : GstIterator *it = m_gstWrapper->gstBinIterateRecurse(GST_BIN(m_context.pipeline));
530 3 : GValue item = G_VALUE_INIT;
531 3 : gboolean done = FALSE;
532 :
533 4 : while (!done)
534 : {
535 3 : switch (m_gstWrapper->gstIteratorNext(it, &item))
536 : {
537 2 : case GST_ITERATOR_OK:
538 : {
539 2 : GstElement *element = GST_ELEMENT(m_glibWrapper->gValueGetObject(&item));
540 2 : GstElementFactory *factory = m_gstWrapper->gstElementGetFactory(element);
541 :
542 2 : if (factory)
543 : {
544 2 : GstElementFactoryListType type = GST_ELEMENT_FACTORY_TYPE_PARSER;
545 2 : if (mediaSourceType == MediaSourceType::AUDIO)
546 : {
547 0 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;
548 : }
549 2 : else if (mediaSourceType == MediaSourceType::VIDEO)
550 : {
551 2 : type |= GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO;
552 : }
553 :
554 2 : if (m_gstWrapper->gstElementFactoryListIsType(factory, type))
555 : {
556 2 : m_glibWrapper->gValueUnset(&item);
557 2 : m_gstWrapper->gstIteratorFree(it);
558 2 : return GST_ELEMENT(m_gstWrapper->gstObjectRef(element));
559 : }
560 : }
561 :
562 0 : m_glibWrapper->gValueUnset(&item);
563 0 : break;
564 : }
565 0 : case GST_ITERATOR_RESYNC:
566 0 : m_gstWrapper->gstIteratorResync(it);
567 0 : break;
568 1 : case GST_ITERATOR_ERROR:
569 : case GST_ITERATOR_DONE:
570 1 : done = TRUE;
571 1 : break;
572 : }
573 : }
574 :
575 1 : RIALTO_SERVER_LOG_WARN("Could not find parser");
576 :
577 1 : m_glibWrapper->gValueUnset(&item);
578 1 : m_gstWrapper->gstIteratorFree(it);
579 :
580 1 : return nullptr;
581 : }
582 :
583 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate>
584 5 : GstGenericPlayer::createAudioAttributes(const std::unique_ptr<IMediaPipeline::MediaSource> &source) const
585 : {
586 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes;
587 5 : const IMediaPipeline::MediaSourceAudio *kSource = dynamic_cast<IMediaPipeline::MediaSourceAudio *>(source.get());
588 5 : if (kSource)
589 : {
590 4 : firebolt::rialto::AudioConfig audioConfig = kSource->getAudioConfig();
591 : audioAttributes =
592 12 : firebolt::rialto::wrappers::AudioAttributesPrivate{"", // param set below.
593 4 : audioConfig.numberOfChannels, audioConfig.sampleRate,
594 : 0, // used only in one of logs in rdk_gstreamer_utils, no
595 : // need to set this param.
596 : 0, // used only in one of logs in rdk_gstreamer_utils, no
597 : // need to set this param.
598 4 : audioConfig.codecSpecificConfig.data(),
599 : static_cast<std::uint32_t>(
600 4 : audioConfig.codecSpecificConfig.size())};
601 4 : if (source->getMimeType() == "audio/mp4" || source->getMimeType() == "audio/aac")
602 : {
603 2 : audioAttributes->m_codecParam = "mp4a";
604 : }
605 2 : else if (source->getMimeType() == "audio/x-eac3")
606 : {
607 1 : audioAttributes->m_codecParam = "ec-3";
608 : }
609 1 : else if (source->getMimeType() == "audio/b-wav" || source->getMimeType() == "audio/x-raw")
610 : {
611 1 : audioAttributes->m_codecParam = "lpcm";
612 : }
613 4 : }
614 : else
615 : {
616 1 : RIALTO_SERVER_LOG_ERROR("Failed to cast source");
617 : }
618 :
619 5 : return audioAttributes;
620 : }
621 :
622 1 : bool GstGenericPlayer::setImmediateOutput(const MediaSourceType &mediaSourceType, bool immediateOutputParam)
623 : {
624 1 : if (!m_workerThread)
625 0 : return false;
626 :
627 2 : m_workerThread->enqueueTask(
628 2 : m_taskFactory->createSetImmediateOutput(m_context, *this, mediaSourceType, immediateOutputParam));
629 1 : return true;
630 : }
631 :
632 5 : bool GstGenericPlayer::getImmediateOutput(const MediaSourceType &mediaSourceType, bool &immediateOutputRef)
633 : {
634 5 : bool returnValue{false};
635 5 : GstElement *sink{getSink(mediaSourceType)};
636 5 : if (sink)
637 : {
638 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
639 : {
640 2 : m_glibWrapper->gObjectGet(sink, "immediate-output", &immediateOutputRef, nullptr);
641 2 : returnValue = true;
642 : }
643 : else
644 : {
645 1 : RIALTO_SERVER_LOG_ERROR("immediate-output not supported in element %s", GST_ELEMENT_NAME(sink));
646 : }
647 3 : m_gstWrapper->gstObjectUnref(sink);
648 : }
649 : else
650 : {
651 2 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property, sink is NULL");
652 : }
653 :
654 5 : return returnValue;
655 : }
656 :
657 5 : bool GstGenericPlayer::getStats(const MediaSourceType &mediaSourceType, uint64_t &renderedFrames, uint64_t &droppedFrames)
658 : {
659 5 : bool returnValue{false};
660 5 : GstElement *sink{getSink(mediaSourceType)};
661 5 : if (sink)
662 : {
663 3 : GstStructure *stats{nullptr};
664 3 : m_glibWrapper->gObjectGet(sink, "stats", &stats, nullptr);
665 3 : if (!stats)
666 : {
667 1 : RIALTO_SERVER_LOG_ERROR("failed to get stats from '%s'", GST_ELEMENT_NAME(sink));
668 : }
669 : else
670 : {
671 : guint64 renderedFramesTmp;
672 : guint64 droppedFramesTmp;
673 3 : if (m_gstWrapper->gstStructureGetUint64(stats, "rendered", &renderedFramesTmp) &&
674 1 : m_gstWrapper->gstStructureGetUint64(stats, "dropped", &droppedFramesTmp))
675 : {
676 1 : renderedFrames = renderedFramesTmp;
677 1 : droppedFrames = droppedFramesTmp;
678 1 : returnValue = true;
679 : }
680 : else
681 : {
682 1 : RIALTO_SERVER_LOG_ERROR("failed to get 'rendered' or 'dropped' from structure (%s)",
683 : GST_ELEMENT_NAME(sink));
684 : }
685 2 : m_gstWrapper->gstStructureFree(stats);
686 : }
687 3 : m_gstWrapper->gstObjectUnref(sink);
688 : }
689 : else
690 : {
691 2 : RIALTO_SERVER_LOG_ERROR("Failed to get stats, sink is NULL");
692 : }
693 :
694 5 : return returnValue;
695 : }
696 :
697 4 : GstBuffer *GstGenericPlayer::createBuffer(const IMediaPipeline::MediaSegment &mediaSegment) const
698 : {
699 4 : GstBuffer *gstBuffer = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getDataLength(), nullptr);
700 4 : m_gstWrapper->gstBufferFill(gstBuffer, 0, mediaSegment.getData(), mediaSegment.getDataLength());
701 :
702 4 : if (mediaSegment.isEncrypted())
703 : {
704 3 : GstBuffer *keyId = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getKeyId().size(), nullptr);
705 3 : m_gstWrapper->gstBufferFill(keyId, 0, mediaSegment.getKeyId().data(), mediaSegment.getKeyId().size());
706 :
707 3 : GstBuffer *initVector = m_gstWrapper->gstBufferNewAllocate(nullptr, mediaSegment.getInitVector().size(), nullptr);
708 6 : m_gstWrapper->gstBufferFill(initVector, 0, mediaSegment.getInitVector().data(),
709 3 : mediaSegment.getInitVector().size());
710 3 : GstBuffer *subsamples{nullptr};
711 3 : if (!mediaSegment.getSubSamples().empty())
712 : {
713 3 : auto subsamplesRawSize = mediaSegment.getSubSamples().size() * (sizeof(guint16) + sizeof(guint32));
714 3 : guint8 *subsamplesRaw = static_cast<guint8 *>(m_glibWrapper->gMalloc(subsamplesRawSize));
715 : GstByteWriter writer;
716 3 : m_gstWrapper->gstByteWriterInitWithData(&writer, subsamplesRaw, subsamplesRawSize, FALSE);
717 :
718 6 : for (const auto &subSample : mediaSegment.getSubSamples())
719 : {
720 3 : m_gstWrapper->gstByteWriterPutUint16Be(&writer, subSample.numClearBytes);
721 3 : m_gstWrapper->gstByteWriterPutUint32Be(&writer, subSample.numEncryptedBytes);
722 : }
723 3 : subsamples = m_gstWrapper->gstBufferNewWrapped(subsamplesRaw, subsamplesRawSize);
724 : }
725 :
726 3 : uint32_t crypt = 0;
727 3 : uint32_t skip = 0;
728 3 : bool encryptionPatternSet = mediaSegment.getEncryptionPattern(crypt, skip);
729 :
730 3 : GstRialtoProtectionData data = {mediaSegment.getMediaKeySessionId(),
731 3 : static_cast<uint32_t>(mediaSegment.getSubSamples().size()),
732 3 : mediaSegment.getInitWithLast15(),
733 : keyId,
734 : initVector,
735 : subsamples,
736 6 : mediaSegment.getCipherMode(),
737 : crypt,
738 : skip,
739 : encryptionPatternSet,
740 6 : m_context.decryptionService};
741 :
742 3 : if (!m_protectionMetadataWrapper->addProtectionMetadata(gstBuffer, data))
743 : {
744 1 : RIALTO_SERVER_LOG_ERROR("Failed to add protection metadata");
745 1 : if (keyId)
746 : {
747 1 : m_gstWrapper->gstBufferUnref(keyId);
748 : }
749 1 : if (initVector)
750 : {
751 1 : m_gstWrapper->gstBufferUnref(initVector);
752 : }
753 1 : if (subsamples)
754 : {
755 1 : m_gstWrapper->gstBufferUnref(subsamples);
756 : }
757 : }
758 : }
759 :
760 4 : GST_BUFFER_TIMESTAMP(gstBuffer) = mediaSegment.getTimeStamp();
761 4 : GST_BUFFER_DURATION(gstBuffer) = mediaSegment.getDuration();
762 4 : return gstBuffer;
763 : }
764 :
765 4 : void GstGenericPlayer::notifyNeedMediaData(const MediaSourceType mediaSource)
766 : {
767 4 : auto elem = m_context.streamInfo.find(mediaSource);
768 4 : if (elem != m_context.streamInfo.end())
769 : {
770 2 : StreamInfo &streamInfo = elem->second;
771 2 : streamInfo.isNeedDataPending = false;
772 :
773 : // Send new NeedMediaData if we still need it
774 2 : if (m_gstPlayerClient && streamInfo.isDataNeeded)
775 : {
776 2 : streamInfo.isNeedDataPending = m_gstPlayerClient->notifyNeedMediaData(mediaSource);
777 : }
778 : }
779 : else
780 : {
781 2 : RIALTO_SERVER_LOG_WARN("Media type %s could not be found", common::convertMediaSourceType(mediaSource));
782 : }
783 4 : }
784 :
785 19 : void GstGenericPlayer::attachData(const firebolt::rialto::MediaSourceType mediaType)
786 : {
787 19 : auto elem = m_context.streamInfo.find(mediaType);
788 19 : if (elem != m_context.streamInfo.end())
789 : {
790 16 : StreamInfo &streamInfo = elem->second;
791 16 : if (streamInfo.buffers.empty() || !streamInfo.isDataNeeded)
792 : {
793 2 : return;
794 : }
795 :
796 14 : if (firebolt::rialto::MediaSourceType::SUBTITLE == mediaType)
797 : {
798 2 : setTextTrackPositionIfRequired(streamInfo.appSrc);
799 : }
800 : else
801 : {
802 36 : pushSampleIfRequired(streamInfo.appSrc, common::convertMediaSourceType(mediaType));
803 : }
804 14 : if (mediaType == firebolt::rialto::MediaSourceType::AUDIO)
805 : {
806 : // This needs to be done before gstAppSrcPushBuffer() is
807 : // called because it can free the memory
808 7 : m_context.lastAudioSampleTimestamps = static_cast<int64_t>(GST_BUFFER_PTS(streamInfo.buffers.back()));
809 : }
810 :
811 28 : for (GstBuffer *buffer : streamInfo.buffers)
812 : {
813 14 : m_gstWrapper->gstAppSrcPushBuffer(GST_APP_SRC(streamInfo.appSrc), buffer);
814 : }
815 14 : streamInfo.buffers.clear();
816 14 : streamInfo.isDataPushed = true;
817 :
818 14 : const bool kIsSingle = m_context.streamInfo.size() == 1;
819 14 : bool allOtherStreamsPushed = std::all_of(m_context.streamInfo.begin(), m_context.streamInfo.end(),
820 15 : [](const auto &entry) { return entry.second.isDataPushed; });
821 :
822 14 : if (!m_context.bufferedNotificationSent && (allOtherStreamsPushed || kIsSingle) && m_gstPlayerClient)
823 : {
824 1 : m_context.bufferedNotificationSent = true;
825 1 : m_gstPlayerClient->notifyNetworkState(NetworkState::BUFFERED);
826 1 : RIALTO_SERVER_LOG_MIL("Buffered NetworkState reached");
827 : }
828 14 : cancelUnderflow(mediaType);
829 :
830 14 : const auto eosInfoIt = m_context.endOfStreamInfo.find(mediaType);
831 14 : if (eosInfoIt != m_context.endOfStreamInfo.end() && eosInfoIt->second == EosState::PENDING)
832 : {
833 0 : setEos(mediaType);
834 : }
835 : }
836 : }
837 :
838 7 : void GstGenericPlayer::updateAudioCaps(int32_t rate, int32_t channels, const std::shared_ptr<CodecData> &codecData)
839 : {
840 7 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::AUDIO);
841 7 : if (elem != m_context.streamInfo.end())
842 : {
843 6 : StreamInfo &streamInfo = elem->second;
844 :
845 6 : constexpr int kInvalidRate{0}, kInvalidChannels{0};
846 6 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
847 6 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
848 :
849 6 : if (rate != kInvalidRate)
850 : {
851 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "rate", G_TYPE_INT, rate, NULL);
852 : }
853 :
854 6 : if (channels != kInvalidChannels)
855 : {
856 3 : m_gstWrapper->gstCapsSetSimple(newCaps, "channels", G_TYPE_INT, channels, NULL);
857 : }
858 :
859 6 : setCodecData(newCaps, codecData);
860 :
861 6 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
862 : {
863 5 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
864 : }
865 :
866 6 : m_gstWrapper->gstCapsUnref(newCaps);
867 6 : m_gstWrapper->gstCapsUnref(currentCaps);
868 : }
869 7 : }
870 :
871 8 : void GstGenericPlayer::updateVideoCaps(int32_t width, int32_t height, Fraction frameRate,
872 : const std::shared_ptr<CodecData> &codecData)
873 : {
874 8 : auto elem = m_context.streamInfo.find(firebolt::rialto::MediaSourceType::VIDEO);
875 8 : if (elem != m_context.streamInfo.end())
876 : {
877 7 : StreamInfo &streamInfo = elem->second;
878 :
879 7 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(streamInfo.appSrc));
880 7 : GstCaps *newCaps = m_gstWrapper->gstCapsCopy(currentCaps);
881 :
882 7 : if (width > 0)
883 : {
884 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "width", G_TYPE_INT, width, NULL);
885 : }
886 :
887 7 : if (height > 0)
888 : {
889 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "height", G_TYPE_INT, height, NULL);
890 : }
891 :
892 7 : if ((kUndefinedSize != frameRate.numerator) && (kUndefinedSize != frameRate.denominator))
893 : {
894 6 : m_gstWrapper->gstCapsSetSimple(newCaps, "framerate", GST_TYPE_FRACTION, frameRate.numerator,
895 : frameRate.denominator, NULL);
896 : }
897 :
898 7 : setCodecData(newCaps, codecData);
899 :
900 7 : if (!m_gstWrapper->gstCapsIsEqual(currentCaps, newCaps))
901 : {
902 6 : m_gstWrapper->gstAppSrcSetCaps(GST_APP_SRC(streamInfo.appSrc), newCaps);
903 : }
904 :
905 7 : m_gstWrapper->gstCapsUnref(currentCaps);
906 7 : m_gstWrapper->gstCapsUnref(newCaps);
907 : }
908 8 : }
909 :
910 5 : void GstGenericPlayer::addAudioClippingToBuffer(GstBuffer *buffer, uint64_t clippingStart, uint64_t clippingEnd) const
911 : {
912 5 : if (clippingStart || clippingEnd)
913 : {
914 4 : if (m_gstWrapper->gstBufferAddAudioClippingMeta(buffer, GST_FORMAT_TIME, clippingStart, clippingEnd))
915 : {
916 3 : RIALTO_SERVER_LOG_DEBUG("Added audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64, buffer,
917 : clippingStart, clippingEnd);
918 : }
919 : else
920 : {
921 1 : RIALTO_SERVER_LOG_WARN("Failed to add audio clipping to buffer %p, start: %" PRIu64 ", end %" PRIu64,
922 : buffer, clippingStart, clippingEnd);
923 : }
924 : }
925 5 : }
926 :
927 13 : bool GstGenericPlayer::setCodecData(GstCaps *caps, const std::shared_ptr<CodecData> &codecData) const
928 : {
929 13 : if (codecData && CodecDataType::BUFFER == codecData->type)
930 : {
931 7 : gpointer memory = m_glibWrapper->gMemdup(codecData->data.data(), codecData->data.size());
932 7 : GstBuffer *buf = m_gstWrapper->gstBufferNewWrapped(memory, codecData->data.size());
933 7 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", GST_TYPE_BUFFER, buf, nullptr);
934 7 : m_gstWrapper->gstBufferUnref(buf);
935 7 : return true;
936 : }
937 6 : if (codecData && CodecDataType::STRING == codecData->type)
938 : {
939 2 : std::string codecDataStr(codecData->data.begin(), codecData->data.end());
940 2 : m_gstWrapper->gstCapsSetSimple(caps, "codec_data", G_TYPE_STRING, codecDataStr.c_str(), nullptr);
941 2 : return true;
942 : }
943 4 : return false;
944 : }
945 :
946 12 : void GstGenericPlayer::pushSampleIfRequired(GstElement *source, const std::string &typeStr)
947 : {
948 12 : auto initialPosition = m_context.initialPositions.find(source);
949 12 : if (m_context.initialPositions.end() == initialPosition)
950 : {
951 : // Sending initial sample not needed
952 7 : return;
953 : }
954 : // GstAppSrc does not replace the segment if it's the same as the previous one.
955 : // That causes problems with position reporting on amlogic devices, so we need to push
956 : // two segments with different reset time values.
957 5 : pushAdditionalSegmentIfRequired(source);
958 :
959 10 : for (const auto &[position, resetTime, appliedRate, stopPosition] : initialPosition->second)
960 : {
961 6 : GstSeekFlags seekFlag = resetTime ? GST_SEEK_FLAG_FLUSH : GST_SEEK_FLAG_NONE;
962 6 : RIALTO_SERVER_LOG_DEBUG("Pushing new %s sample...", typeStr.c_str());
963 6 : GstSegment *segment{m_gstWrapper->gstSegmentNew()};
964 6 : m_gstWrapper->gstSegmentInit(segment, GST_FORMAT_TIME);
965 6 : if (!m_gstWrapper->gstSegmentDoSeek(segment, m_context.playbackRate, GST_FORMAT_TIME, seekFlag,
966 : GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, stopPosition, nullptr))
967 : {
968 1 : RIALTO_SERVER_LOG_WARN("Segment seek failed.");
969 1 : m_gstWrapper->gstSegmentFree(segment);
970 1 : m_context.initialPositions.erase(initialPosition);
971 1 : return;
972 : }
973 5 : segment->applied_rate = appliedRate;
974 5 : RIALTO_SERVER_LOG_MIL("New %s segment: [%" GST_TIME_FORMAT ", %" GST_TIME_FORMAT
975 : "], rate: %f, appliedRate %f, reset_time: %d\n",
976 : typeStr.c_str(), GST_TIME_ARGS(segment->start), GST_TIME_ARGS(segment->stop),
977 : segment->rate, segment->applied_rate, resetTime);
978 :
979 5 : GstCaps *currentCaps = m_gstWrapper->gstAppSrcGetCaps(GST_APP_SRC(source));
980 : // We can't pass the buffer in the GstSample, because the implementation of gst_app_src_push_sample
981 : // uses gst_buffer_copy, which loses the RialtoProtectionMeta (and that causes problems with EME
982 : // for the first frame).
983 5 : GstSample *sample = m_gstWrapper->gstSampleNew(nullptr, currentCaps, segment, nullptr);
984 5 : m_gstWrapper->gstAppSrcPushSample(GST_APP_SRC(source), sample);
985 5 : m_gstWrapper->gstSampleUnref(sample);
986 5 : m_gstWrapper->gstCapsUnref(currentCaps);
987 :
988 5 : m_gstWrapper->gstSegmentFree(segment);
989 : }
990 4 : m_context.currentPosition[source] = initialPosition->second.back();
991 4 : m_context.initialPositions.erase(initialPosition);
992 4 : return;
993 : }
994 :
995 5 : void GstGenericPlayer::pushAdditionalSegmentIfRequired(GstElement *source)
996 : {
997 5 : auto currentPosition = m_context.currentPosition.find(source);
998 5 : if (m_context.currentPosition.end() == currentPosition)
999 : {
1000 4 : return;
1001 : }
1002 1 : auto initialPosition = m_context.initialPositions.find(source);
1003 1 : if (m_context.initialPositions.end() == initialPosition)
1004 : {
1005 0 : return;
1006 : }
1007 2 : if (initialPosition->second.size() == 1 && initialPosition->second.back().resetTime &&
1008 1 : currentPosition->second == initialPosition->second.back())
1009 : {
1010 1 : RIALTO_SERVER_LOG_INFO("Adding additional segment with reset_time = false");
1011 1 : SegmentData additionalSegment = initialPosition->second.back();
1012 1 : additionalSegment.resetTime = false;
1013 1 : initialPosition->second.push_back(additionalSegment);
1014 : }
1015 : }
1016 :
1017 2 : void GstGenericPlayer::setTextTrackPositionIfRequired(GstElement *source)
1018 : {
1019 2 : auto initialPosition = m_context.initialPositions.find(source);
1020 2 : if (m_context.initialPositions.end() == initialPosition)
1021 : {
1022 : // Sending initial sample not needed
1023 1 : return;
1024 : }
1025 :
1026 1 : RIALTO_SERVER_LOG_MIL("New subtitle position set %" GST_TIME_FORMAT,
1027 : GST_TIME_ARGS(initialPosition->second.back().position));
1028 1 : m_glibWrapper->gObjectSet(m_context.subtitleSink, "position",
1029 1 : static_cast<guint64>(initialPosition->second.back().position), nullptr);
1030 :
1031 1 : m_context.initialPositions.erase(initialPosition);
1032 : }
1033 :
1034 7 : bool GstGenericPlayer::reattachSource(const std::unique_ptr<IMediaPipeline::MediaSource> &source)
1035 : {
1036 7 : if (m_context.streamInfo.find(source->getType()) == m_context.streamInfo.end())
1037 : {
1038 1 : RIALTO_SERVER_LOG_ERROR("Unable to switch source, type does not exist");
1039 1 : return false;
1040 : }
1041 6 : if (source->getMimeType().empty())
1042 : {
1043 1 : RIALTO_SERVER_LOG_WARN("Skip switch audio source. Unknown mime type");
1044 1 : return false;
1045 : }
1046 5 : std::optional<firebolt::rialto::wrappers::AudioAttributesPrivate> audioAttributes{createAudioAttributes(source)};
1047 5 : if (!audioAttributes)
1048 : {
1049 1 : RIALTO_SERVER_LOG_ERROR("Failed to create audio attributes");
1050 1 : return false;
1051 : }
1052 :
1053 4 : long long currentDispPts = getPosition(m_context.pipeline); // NOLINT(runtime/int)
1054 4 : GstCaps *caps{createCapsFromMediaSource(m_gstWrapper, m_glibWrapper, source)};
1055 4 : GstAppSrc *appSrc{GST_APP_SRC(m_context.streamInfo[source->getType()].appSrc)};
1056 4 : GstCaps *oldCaps = m_gstWrapper->gstAppSrcGetCaps(appSrc);
1057 4 : if ((!oldCaps) || (!m_gstWrapper->gstCapsIsEqual(caps, oldCaps)))
1058 : {
1059 3 : RIALTO_SERVER_LOG_DEBUG("Caps not equal. Perform audio track codec channel switch.");
1060 3 : int sampleAttributes{
1061 : 0}; // rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch checks if this param != NULL only.
1062 3 : std::uint32_t status{0}; // must be 0 to make rdk_gstreamer_utils::performAudioTrackCodecChannelSwitch work
1063 3 : unsigned int ui32Delay{0}; // output param
1064 3 : long long audioChangeTargetPts{-1}; // NOLINT(runtime/int) output param. Set audioChangeTargetPts =
1065 : // currentDispPts in rdk_gstreamer_utils function stub
1066 3 : unsigned int audioChangeStage{0}; // Output param. Set to AUDCHG_ALIGN in rdk_gstreamer_utils function stub
1067 3 : gchar *oldCapsCStr = m_gstWrapper->gstCapsToString(oldCaps);
1068 3 : std::string oldCapsStr = std::string(oldCapsCStr);
1069 3 : m_glibWrapper->gFree(oldCapsCStr);
1070 3 : bool audioAac{oldCapsStr.find("audio/mpeg") != std::string::npos};
1071 3 : bool svpEnabled{true}; // assume always true
1072 3 : bool retVal{false}; // Output param. Set to TRUE in rdk_gstreamer_utils function stub
1073 : bool result =
1074 3 : m_rdkGstreamerUtilsWrapper
1075 6 : ->performAudioTrackCodecChannelSwitch(&m_context.playbackGroup, &sampleAttributes, &(*audioAttributes),
1076 : &status, &ui32Delay, &audioChangeTargetPts, ¤tDispPts,
1077 : &audioChangeStage,
1078 : &caps, // may fail for amlogic - that implementation changes
1079 : // this parameter, it's probably used by Netflix later
1080 3 : &audioAac, svpEnabled, GST_ELEMENT(appSrc), &retVal);
1081 :
1082 3 : if (!result || !retVal)
1083 : {
1084 3 : RIALTO_SERVER_LOG_WARN("performAudioTrackCodecChannelSwitch failed! Result: %d, retval %d", result, retVal);
1085 : }
1086 : }
1087 : else
1088 : {
1089 1 : RIALTO_SERVER_LOG_DEBUG("Skip switching audio source - caps are the same.");
1090 : }
1091 :
1092 4 : m_context.lastAudioSampleTimestamps = currentDispPts;
1093 4 : if (caps)
1094 4 : m_gstWrapper->gstCapsUnref(caps);
1095 4 : if (oldCaps)
1096 4 : m_gstWrapper->gstCapsUnref(oldCaps);
1097 :
1098 4 : return true;
1099 5 : }
1100 :
1101 0 : bool GstGenericPlayer::hasSourceType(const MediaSourceType &mediaSourceType) const
1102 : {
1103 0 : return m_context.streamInfo.find(mediaSourceType) != m_context.streamInfo.end();
1104 : }
1105 :
1106 87 : void GstGenericPlayer::scheduleNeedMediaData(GstAppSrc *src)
1107 : {
1108 87 : if (m_workerThread)
1109 : {
1110 87 : m_workerThread->enqueueTask(m_taskFactory->createNeedData(m_context, *this, src));
1111 : }
1112 : }
1113 :
1114 1 : void GstGenericPlayer::scheduleEnoughData(GstAppSrc *src)
1115 : {
1116 1 : if (m_workerThread)
1117 : {
1118 1 : m_workerThread->enqueueTask(m_taskFactory->createEnoughData(m_context, src));
1119 : }
1120 : }
1121 :
1122 2 : void GstGenericPlayer::scheduleAudioUnderflow()
1123 : {
1124 2 : if (m_workerThread)
1125 : {
1126 2 : bool underflowEnabled = m_context.isPlaying;
1127 4 : m_workerThread->enqueueTask(
1128 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::AUDIO));
1129 : }
1130 2 : }
1131 :
1132 2 : void GstGenericPlayer::scheduleVideoUnderflow()
1133 : {
1134 2 : if (m_workerThread)
1135 : {
1136 2 : bool underflowEnabled = m_context.isPlaying;
1137 4 : m_workerThread->enqueueTask(
1138 4 : m_taskFactory->createUnderflow(m_context, *this, underflowEnabled, MediaSourceType::VIDEO));
1139 : }
1140 2 : }
1141 :
1142 1 : void GstGenericPlayer::scheduleAllSourcesAttached()
1143 : {
1144 1 : allSourcesAttached();
1145 : }
1146 :
1147 14 : void GstGenericPlayer::cancelUnderflow(firebolt::rialto::MediaSourceType mediaSource)
1148 : {
1149 14 : auto elem = m_context.streamInfo.find(mediaSource);
1150 14 : if (elem != m_context.streamInfo.end())
1151 : {
1152 14 : StreamInfo &streamInfo = elem->second;
1153 14 : if (!streamInfo.underflowOccured)
1154 : {
1155 11 : return;
1156 : }
1157 :
1158 3 : RIALTO_SERVER_LOG_DEBUG("Cancelling %s underflow", common::convertMediaSourceType(mediaSource));
1159 3 : streamInfo.underflowOccured = false;
1160 : }
1161 : }
1162 :
1163 3 : void GstGenericPlayer::play(bool &async)
1164 : {
1165 3 : if (0 == m_ongoingStateChangesNumber)
1166 : {
1167 : // Operation called on main thread, because PAUSED->PLAYING change is synchronous and needs to be done fast.
1168 : //
1169 : // m_context.pipeline can be used safely because it is modified only in the GstGenericPlayer
1170 : // constructor and destructor, both of which run on the main thread, so we won't crash here.
1171 2 : ++m_ongoingStateChangesNumber;
1172 2 : async = (changePipelineState(GST_STATE_PLAYING) == GST_STATE_CHANGE_ASYNC);
1173 2 : RIALTO_SERVER_LOG_MIL("State change to PLAYING requested");
1174 : }
1175 : else
1176 : {
1177 1 : ++m_ongoingStateChangesNumber;
1178 1 : async = true;
1179 1 : if (m_workerThread)
1180 : {
1181 1 : m_workerThread->enqueueTask(m_taskFactory->createPlay(*this));
1182 : }
1183 : }
1184 3 : }
1185 :
1186 2 : void GstGenericPlayer::pause()
1187 : {
1188 2 : ++m_ongoingStateChangesNumber;
1189 2 : if (m_workerThread)
1190 : {
1191 2 : m_workerThread->enqueueTask(m_taskFactory->createPause(m_context, *this));
1192 : }
1193 : }
1194 :
1195 1 : void GstGenericPlayer::stop()
1196 : {
1197 1 : ++m_ongoingStateChangesNumber;
1198 1 : if (m_workerThread)
1199 : {
1200 1 : m_workerThread->enqueueTask(m_taskFactory->createStop(m_context, *this));
1201 : }
1202 : }
1203 :
1204 6 : GstStateChangeReturn GstGenericPlayer::changePipelineState(GstState newState)
1205 : {
1206 6 : if (!m_context.pipeline)
1207 : {
1208 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - pipeline is nullptr");
1209 1 : if (m_gstPlayerClient)
1210 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1211 1 : --m_ongoingStateChangesNumber;
1212 1 : return GST_STATE_CHANGE_FAILURE;
1213 : }
1214 5 : m_context.flushOnPrerollController->setTargetState(newState);
1215 5 : const GstStateChangeReturn result{m_gstWrapper->gstElementSetState(m_context.pipeline, newState)};
1216 5 : if (result == GST_STATE_CHANGE_FAILURE)
1217 : {
1218 1 : RIALTO_SERVER_LOG_ERROR("Change state failed - Gstreamer returned an error");
1219 1 : if (m_gstPlayerClient)
1220 1 : m_gstPlayerClient->notifyPlaybackState(PlaybackState::FAILURE);
1221 : }
1222 5 : --m_ongoingStateChangesNumber;
1223 5 : return result;
1224 : }
1225 :
1226 15 : int64_t GstGenericPlayer::getPosition(GstElement *element)
1227 : {
1228 15 : if (!element)
1229 : {
1230 1 : RIALTO_SERVER_LOG_WARN("Element is null");
1231 1 : return -1;
1232 : }
1233 :
1234 14 : m_gstWrapper->gstStateLock(element);
1235 :
1236 28 : if (m_gstWrapper->gstElementGetState(element) < GST_STATE_PAUSED ||
1237 14 : (m_gstWrapper->gstElementGetStateReturn(element) == GST_STATE_CHANGE_ASYNC &&
1238 1 : m_gstWrapper->gstElementGetStateNext(element) == GST_STATE_PAUSED))
1239 : {
1240 1 : RIALTO_SERVER_LOG_WARN("Element is prerolling or in invalid state - state: %s, return: %s, next: %s",
1241 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetState(element)),
1242 : m_gstWrapper->gstElementStateChangeReturnGetName(
1243 : m_gstWrapper->gstElementGetStateReturn(element)),
1244 : m_gstWrapper->gstElementStateGetName(m_gstWrapper->gstElementGetStateNext(element)));
1245 :
1246 1 : m_gstWrapper->gstStateUnlock(element);
1247 1 : return -1;
1248 : }
1249 13 : m_gstWrapper->gstStateUnlock(element);
1250 :
1251 13 : gint64 position = -1;
1252 13 : if (!m_gstWrapper->gstElementQueryPosition(m_context.pipeline, GST_FORMAT_TIME, &position))
1253 : {
1254 1 : RIALTO_SERVER_LOG_WARN("Failed to query position");
1255 1 : return -1;
1256 : }
1257 :
1258 12 : return position;
1259 : }
1260 :
1261 1 : void GstGenericPlayer::setVideoGeometry(int x, int y, int width, int height)
1262 : {
1263 1 : if (m_workerThread)
1264 : {
1265 2 : m_workerThread->enqueueTask(
1266 2 : m_taskFactory->createSetVideoGeometry(m_context, *this, Rectangle{x, y, width, height}));
1267 : }
1268 1 : }
1269 :
1270 1 : void GstGenericPlayer::setEos(const firebolt::rialto::MediaSourceType &type)
1271 : {
1272 1 : if (m_workerThread)
1273 : {
1274 1 : m_workerThread->enqueueTask(m_taskFactory->createEos(m_context, *this, type));
1275 : }
1276 : }
1277 :
1278 4 : bool GstGenericPlayer::setVideoSinkRectangle()
1279 : {
1280 4 : bool result = false;
1281 4 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1282 4 : if (videoSink)
1283 : {
1284 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "rectangle"))
1285 : {
1286 : std::string rect =
1287 4 : std::to_string(m_context.pendingGeometry.x) + ',' + std::to_string(m_context.pendingGeometry.y) + ',' +
1288 6 : std::to_string(m_context.pendingGeometry.width) + ',' + std::to_string(m_context.pendingGeometry.height);
1289 2 : m_glibWrapper->gObjectSet(videoSink, "rectangle", rect.c_str(), nullptr);
1290 2 : m_context.pendingGeometry.clear();
1291 2 : result = true;
1292 : }
1293 : else
1294 : {
1295 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the video rectangle");
1296 : }
1297 3 : m_gstWrapper->gstObjectUnref(videoSink);
1298 : }
1299 : else
1300 : {
1301 1 : RIALTO_SERVER_LOG_ERROR("Failed to set video rectangle, sink is NULL");
1302 : }
1303 :
1304 4 : return result;
1305 : }
1306 :
1307 3 : bool GstGenericPlayer::setImmediateOutput()
1308 : {
1309 3 : bool result{false};
1310 3 : if (m_context.pendingImmediateOutputForVideo.has_value())
1311 : {
1312 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1313 3 : if (sink)
1314 : {
1315 2 : bool immediateOutput{m_context.pendingImmediateOutputForVideo.value()};
1316 2 : RIALTO_SERVER_LOG_DEBUG("Set immediate-output to %s", immediateOutput ? "TRUE" : "FALSE");
1317 :
1318 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "immediate-output"))
1319 : {
1320 1 : gboolean immediateOutputGboolean{immediateOutput ? TRUE : FALSE};
1321 1 : m_glibWrapper->gObjectSet(sink, "immediate-output", immediateOutputGboolean, nullptr);
1322 1 : result = true;
1323 : }
1324 : else
1325 : {
1326 1 : RIALTO_SERVER_LOG_ERROR("Failed to set immediate-output property on sink '%s'", GST_ELEMENT_NAME(sink));
1327 : }
1328 2 : m_context.pendingImmediateOutputForVideo.reset();
1329 2 : m_gstWrapper->gstObjectUnref(sink);
1330 : }
1331 : else
1332 : {
1333 1 : RIALTO_SERVER_LOG_DEBUG("Pending an immediate-output, sink is NULL");
1334 : }
1335 : }
1336 3 : return result;
1337 : }
1338 :
1339 4 : bool GstGenericPlayer::setShowVideoWindow()
1340 : {
1341 4 : if (!m_context.pendingShowVideoWindow.has_value())
1342 : {
1343 1 : RIALTO_SERVER_LOG_WARN("No show video window value to be set. Aborting...");
1344 1 : return false;
1345 : }
1346 :
1347 3 : GstElement *videoSink{getSink(MediaSourceType::VIDEO)};
1348 3 : if (!videoSink)
1349 : {
1350 1 : RIALTO_SERVER_LOG_DEBUG("Setting show video window queued. Video sink is NULL");
1351 1 : return false;
1352 : }
1353 2 : bool result{false};
1354 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "show-video-window"))
1355 : {
1356 1 : m_glibWrapper->gObjectSet(videoSink, "show-video-window", m_context.pendingShowVideoWindow.value(), nullptr);
1357 1 : result = true;
1358 : }
1359 : else
1360 : {
1361 1 : RIALTO_SERVER_LOG_ERROR("Setting show video window failed. Property does not exist");
1362 : }
1363 2 : m_context.pendingShowVideoWindow.reset();
1364 2 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1365 2 : return result;
1366 : }
1367 :
1368 4 : bool GstGenericPlayer::setLowLatency()
1369 : {
1370 4 : bool result{false};
1371 4 : if (m_context.pendingLowLatency.has_value())
1372 : {
1373 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1374 4 : if (sink)
1375 : {
1376 3 : bool lowLatency{m_context.pendingLowLatency.value()};
1377 3 : RIALTO_SERVER_LOG_DEBUG("Set low-latency to %s", lowLatency ? "TRUE" : "FALSE");
1378 :
1379 3 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "low-latency"))
1380 : {
1381 2 : gboolean lowLatencyGboolean{lowLatency ? TRUE : FALSE};
1382 2 : m_glibWrapper->gObjectSet(sink, "low-latency", lowLatencyGboolean, nullptr);
1383 2 : result = true;
1384 : }
1385 : else
1386 : {
1387 1 : RIALTO_SERVER_LOG_ERROR("Failed to set low-latency property on sink '%s'", GST_ELEMENT_NAME(sink));
1388 : }
1389 3 : m_context.pendingLowLatency.reset();
1390 3 : m_gstWrapper->gstObjectUnref(sink);
1391 : }
1392 : else
1393 : {
1394 1 : RIALTO_SERVER_LOG_DEBUG("Pending low-latency, sink is NULL");
1395 : }
1396 : }
1397 4 : return result;
1398 : }
1399 :
1400 3 : bool GstGenericPlayer::setSync()
1401 : {
1402 3 : bool result{false};
1403 3 : if (m_context.pendingSync.has_value())
1404 : {
1405 3 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1406 3 : if (sink)
1407 : {
1408 2 : bool sync{m_context.pendingSync.value()};
1409 2 : RIALTO_SERVER_LOG_DEBUG("Set sync to %s", sync ? "TRUE" : "FALSE");
1410 :
1411 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1412 : {
1413 1 : gboolean syncGboolean{sync ? TRUE : FALSE};
1414 1 : m_glibWrapper->gObjectSet(sink, "sync", syncGboolean, nullptr);
1415 1 : result = true;
1416 : }
1417 : else
1418 : {
1419 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync property on sink '%s'", GST_ELEMENT_NAME(sink));
1420 : }
1421 2 : m_context.pendingSync.reset();
1422 2 : m_gstWrapper->gstObjectUnref(sink);
1423 : }
1424 : else
1425 : {
1426 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync, sink is NULL");
1427 : }
1428 : }
1429 3 : return result;
1430 : }
1431 :
1432 3 : bool GstGenericPlayer::setSyncOff()
1433 : {
1434 3 : bool result{false};
1435 3 : if (m_context.pendingSyncOff.has_value())
1436 : {
1437 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1438 3 : if (decoder)
1439 : {
1440 2 : bool syncOff{m_context.pendingSyncOff.value()};
1441 2 : RIALTO_SERVER_LOG_DEBUG("Set sync-off to %s", syncOff ? "TRUE" : "FALSE");
1442 :
1443 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "sync-off"))
1444 : {
1445 1 : gboolean syncOffGboolean{syncOff ? TRUE : FALSE};
1446 1 : m_glibWrapper->gObjectSet(decoder, "sync-off", syncOffGboolean, nullptr);
1447 1 : result = true;
1448 : }
1449 : else
1450 : {
1451 1 : RIALTO_SERVER_LOG_ERROR("Failed to set sync-off property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1452 : }
1453 2 : m_context.pendingSyncOff.reset();
1454 2 : m_gstWrapper->gstObjectUnref(decoder);
1455 : }
1456 : else
1457 : {
1458 1 : RIALTO_SERVER_LOG_DEBUG("Pending sync-off, decoder is NULL");
1459 : }
1460 : }
1461 3 : return result;
1462 : }
1463 :
1464 6 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &type)
1465 : {
1466 6 : bool result{false};
1467 6 : int32_t streamSyncMode{0};
1468 : {
1469 6 : std::unique_lock lock{m_context.propertyMutex};
1470 6 : if (m_context.pendingStreamSyncMode.find(type) == m_context.pendingStreamSyncMode.end())
1471 : {
1472 0 : return false;
1473 : }
1474 6 : streamSyncMode = m_context.pendingStreamSyncMode[type];
1475 : }
1476 6 : if (MediaSourceType::AUDIO == type)
1477 : {
1478 3 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
1479 3 : if (!decoder)
1480 : {
1481 1 : RIALTO_SERVER_LOG_DEBUG("Pending stream-sync-mode, decoder is NULL");
1482 1 : return false;
1483 : }
1484 :
1485 2 : RIALTO_SERVER_LOG_DEBUG("Set stream-sync-mode to %d", streamSyncMode);
1486 :
1487 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
1488 : {
1489 1 : gint streamSyncModeGint{static_cast<gint>(streamSyncMode)};
1490 1 : m_glibWrapper->gObjectSet(decoder, "stream-sync-mode", streamSyncModeGint, nullptr);
1491 1 : result = true;
1492 : }
1493 : else
1494 : {
1495 1 : RIALTO_SERVER_LOG_ERROR("Failed to set stream-sync-mode property on decoder '%s'", GST_ELEMENT_NAME(decoder));
1496 : }
1497 2 : m_gstWrapper->gstObjectUnref(decoder);
1498 2 : std::unique_lock lock{m_context.propertyMutex};
1499 2 : m_context.pendingStreamSyncMode.erase(type);
1500 : }
1501 3 : else if (MediaSourceType::VIDEO == type)
1502 : {
1503 3 : GstElement *parser = getParser(MediaSourceType::VIDEO);
1504 3 : if (!parser)
1505 : {
1506 1 : RIALTO_SERVER_LOG_DEBUG("Pending syncmode-streaming, parser is NULL");
1507 1 : return false;
1508 : }
1509 :
1510 2 : gboolean streamSyncModeBoolean{static_cast<gboolean>(streamSyncMode)};
1511 2 : RIALTO_SERVER_LOG_DEBUG("Set syncmode-streaming to %d", streamSyncMode);
1512 :
1513 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(parser), "syncmode-streaming"))
1514 : {
1515 1 : m_glibWrapper->gObjectSet(parser, "syncmode-streaming", streamSyncModeBoolean, nullptr);
1516 1 : result = true;
1517 : }
1518 : else
1519 : {
1520 1 : RIALTO_SERVER_LOG_ERROR("Failed to set syncmode-streaming property on parser '%s'", GST_ELEMENT_NAME(parser));
1521 : }
1522 2 : m_gstWrapper->gstObjectUnref(parser);
1523 2 : std::unique_lock lock{m_context.propertyMutex};
1524 2 : m_context.pendingStreamSyncMode.erase(type);
1525 : }
1526 4 : return result;
1527 : }
1528 :
1529 3 : bool GstGenericPlayer::setRenderFrame()
1530 : {
1531 3 : bool result{false};
1532 3 : if (m_context.pendingRenderFrame)
1533 : {
1534 5 : static const std::string kStepOnPrerollPropertyName = "frame-step-on-preroll";
1535 3 : GstElement *sink{getSink(MediaSourceType::VIDEO)};
1536 3 : if (sink)
1537 : {
1538 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), kStepOnPrerollPropertyName.c_str()))
1539 : {
1540 1 : RIALTO_SERVER_LOG_INFO("Rendering preroll");
1541 :
1542 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 1, nullptr);
1543 1 : m_gstWrapper->gstElementSendEvent(sink, m_gstWrapper->gstEventNewStep(GST_FORMAT_BUFFERS, 1, 1.0, true,
1544 : false));
1545 1 : m_glibWrapper->gObjectSet(sink, kStepOnPrerollPropertyName.c_str(), 0, nullptr);
1546 1 : result = true;
1547 : }
1548 : else
1549 : {
1550 1 : RIALTO_SERVER_LOG_ERROR("Video sink doesn't have property `%s`", kStepOnPrerollPropertyName.c_str());
1551 : }
1552 2 : m_gstWrapper->gstObjectUnref(sink);
1553 2 : m_context.pendingRenderFrame = false;
1554 : }
1555 : else
1556 : {
1557 1 : RIALTO_SERVER_LOG_DEBUG("Pending render frame, sink is NULL");
1558 : }
1559 : }
1560 3 : return result;
1561 : }
1562 :
1563 3 : bool GstGenericPlayer::setBufferingLimit()
1564 : {
1565 3 : bool result{false};
1566 3 : guint bufferingLimit{0};
1567 : {
1568 3 : std::unique_lock lock{m_context.propertyMutex};
1569 3 : if (!m_context.pendingBufferingLimit.has_value())
1570 : {
1571 0 : return false;
1572 : }
1573 3 : bufferingLimit = static_cast<guint>(m_context.pendingBufferingLimit.value());
1574 : }
1575 :
1576 3 : GstElement *decoder{getDecoder(MediaSourceType::AUDIO)};
1577 3 : if (decoder)
1578 : {
1579 2 : RIALTO_SERVER_LOG_DEBUG("Set limit-buffering-ms to %u", bufferingLimit);
1580 :
1581 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
1582 : {
1583 1 : m_glibWrapper->gObjectSet(decoder, "limit-buffering-ms", bufferingLimit, nullptr);
1584 1 : result = true;
1585 : }
1586 : else
1587 : {
1588 1 : RIALTO_SERVER_LOG_ERROR("Failed to set limit-buffering-ms property on decoder '%s'",
1589 : GST_ELEMENT_NAME(decoder));
1590 : }
1591 2 : m_gstWrapper->gstObjectUnref(decoder);
1592 2 : std::unique_lock lock{m_context.propertyMutex};
1593 2 : m_context.pendingBufferingLimit.reset();
1594 : }
1595 : else
1596 : {
1597 1 : RIALTO_SERVER_LOG_DEBUG("Pending limit-buffering-ms, decoder is NULL");
1598 : }
1599 3 : return result;
1600 : }
1601 :
1602 2 : bool GstGenericPlayer::setUseBuffering()
1603 : {
1604 2 : std::unique_lock lock{m_context.propertyMutex};
1605 2 : if (m_context.pendingUseBuffering.has_value())
1606 : {
1607 2 : if (m_context.playbackGroup.m_curAudioDecodeBin)
1608 : {
1609 1 : gboolean useBufferingGboolean{m_context.pendingUseBuffering.value() ? TRUE : FALSE};
1610 1 : RIALTO_SERVER_LOG_DEBUG("Set use-buffering to %d", useBufferingGboolean);
1611 1 : m_glibWrapper->gObjectSet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering",
1612 : useBufferingGboolean, nullptr);
1613 1 : m_context.pendingUseBuffering.reset();
1614 1 : return true;
1615 : }
1616 : else
1617 : {
1618 1 : RIALTO_SERVER_LOG_DEBUG("Pending use-buffering, decodebin is NULL");
1619 : }
1620 : }
1621 1 : return false;
1622 2 : }
1623 :
1624 8 : bool GstGenericPlayer::setWesterossinkSecondaryVideo()
1625 : {
1626 8 : bool result = false;
1627 8 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("westerossink");
1628 8 : if (factory)
1629 : {
1630 7 : GstElement *videoSink = m_gstWrapper->gstElementFactoryCreate(factory, nullptr);
1631 7 : if (videoSink)
1632 : {
1633 5 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(videoSink), "res-usage"))
1634 : {
1635 4 : m_glibWrapper->gObjectSet(videoSink, "res-usage", 0x0u, nullptr);
1636 4 : m_glibWrapper->gObjectSet(m_context.pipeline, "video-sink", videoSink, nullptr);
1637 4 : result = true;
1638 : }
1639 : else
1640 : {
1641 1 : RIALTO_SERVER_LOG_ERROR("Failed to set the westerossink res-usage");
1642 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(videoSink));
1643 : }
1644 : }
1645 : else
1646 : {
1647 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the westerossink");
1648 : }
1649 :
1650 7 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
1651 : }
1652 : else
1653 : {
1654 : // No westeros sink
1655 1 : result = true;
1656 : }
1657 :
1658 8 : return result;
1659 : }
1660 :
1661 8 : bool GstGenericPlayer::setErmContext()
1662 : {
1663 8 : bool result = false;
1664 8 : GstContext *context = m_gstWrapper->gstContextNew("erm", false);
1665 8 : if (context)
1666 : {
1667 6 : GstStructure *contextStructure = m_gstWrapper->gstContextWritableStructure(context);
1668 6 : if (contextStructure)
1669 : {
1670 5 : m_gstWrapper->gstStructureSet(contextStructure, "res-usage", G_TYPE_UINT, 0x0u, nullptr);
1671 5 : m_gstWrapper->gstElementSetContext(GST_ELEMENT(m_context.pipeline), context);
1672 5 : result = true;
1673 : }
1674 : else
1675 : {
1676 1 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm structure");
1677 : }
1678 6 : m_gstWrapper->gstContextUnref(context);
1679 : }
1680 : else
1681 : {
1682 2 : RIALTO_SERVER_LOG_ERROR("Failed to create the erm context");
1683 : }
1684 :
1685 8 : return result;
1686 : }
1687 :
1688 6 : void GstGenericPlayer::startPositionReportingAndCheckAudioUnderflowTimer()
1689 : {
1690 6 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1691 : {
1692 1 : return;
1693 : }
1694 :
1695 15 : m_positionReportingAndCheckAudioUnderflowTimer = m_timerFactory->createTimer(
1696 : kPositionReportTimerMs,
1697 10 : [this]()
1698 : {
1699 1 : if (m_workerThread)
1700 : {
1701 1 : m_workerThread->enqueueTask(m_taskFactory->createReportPosition(m_context, *this));
1702 1 : m_workerThread->enqueueTask(m_taskFactory->createCheckAudioUnderflow(m_context, *this));
1703 : }
1704 1 : },
1705 5 : firebolt::rialto::common::TimerType::PERIODIC);
1706 : }
1707 :
1708 4 : void GstGenericPlayer::stopPositionReportingAndCheckAudioUnderflowTimer()
1709 : {
1710 4 : if (m_positionReportingAndCheckAudioUnderflowTimer && m_positionReportingAndCheckAudioUnderflowTimer->isActive())
1711 : {
1712 1 : m_positionReportingAndCheckAudioUnderflowTimer->cancel();
1713 1 : m_positionReportingAndCheckAudioUnderflowTimer.reset();
1714 : }
1715 4 : }
1716 :
1717 6 : void GstGenericPlayer::startNotifyPlaybackInfoTimer()
1718 : {
1719 : static constexpr std::chrono::milliseconds kPlaybackInfoTimerMs{32};
1720 6 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
1721 : {
1722 1 : return;
1723 : }
1724 :
1725 5 : notifyPlaybackInfo();
1726 :
1727 : m_playbackInfoTimer =
1728 5 : m_timerFactory
1729 6 : ->createTimer(kPlaybackInfoTimerMs, [this]() { notifyPlaybackInfo(); }, firebolt::rialto::common::TimerType::PERIODIC);
1730 : }
1731 :
1732 3 : void GstGenericPlayer::stopNotifyPlaybackInfoTimer()
1733 : {
1734 3 : if (m_playbackInfoTimer && m_playbackInfoTimer->isActive())
1735 : {
1736 1 : m_playbackInfoTimer->cancel();
1737 1 : m_playbackInfoTimer.reset();
1738 : }
1739 3 : }
1740 :
1741 0 : void GstGenericPlayer::startSubtitleClockResyncTimer()
1742 : {
1743 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1744 : {
1745 0 : return;
1746 : }
1747 :
1748 0 : m_subtitleClockResyncTimer = m_timerFactory->createTimer(
1749 : kSubtitleClockResyncInterval,
1750 0 : [this]()
1751 : {
1752 0 : if (m_workerThread)
1753 : {
1754 0 : m_workerThread->enqueueTask(m_taskFactory->createSynchroniseSubtitleClock(m_context, *this));
1755 : }
1756 0 : },
1757 0 : firebolt::rialto::common::TimerType::PERIODIC);
1758 : }
1759 :
1760 0 : void GstGenericPlayer::stopSubtitleClockResyncTimer()
1761 : {
1762 0 : if (m_subtitleClockResyncTimer && m_subtitleClockResyncTimer->isActive())
1763 : {
1764 0 : m_subtitleClockResyncTimer->cancel();
1765 0 : m_subtitleClockResyncTimer.reset();
1766 : }
1767 : }
1768 :
1769 2 : void GstGenericPlayer::stopWorkerThread()
1770 : {
1771 2 : if (m_workerThread)
1772 : {
1773 2 : m_workerThread->stop();
1774 : }
1775 : }
1776 :
1777 0 : void GstGenericPlayer::setPendingPlaybackRate()
1778 : {
1779 0 : RIALTO_SERVER_LOG_INFO("Setting pending playback rate");
1780 0 : setPlaybackRate(m_context.pendingPlaybackRate);
1781 : }
1782 :
1783 1 : void GstGenericPlayer::renderFrame()
1784 : {
1785 1 : if (m_workerThread)
1786 : {
1787 1 : m_workerThread->enqueueTask(m_taskFactory->createRenderFrame(m_context, *this));
1788 : }
1789 : }
1790 :
1791 18 : void GstGenericPlayer::setVolume(double targetVolume, uint32_t volumeDuration, firebolt::rialto::EaseType easeType)
1792 : {
1793 18 : if (m_workerThread)
1794 : {
1795 36 : m_workerThread->enqueueTask(
1796 36 : m_taskFactory->createSetVolume(m_context, *this, targetVolume, volumeDuration, easeType));
1797 : }
1798 18 : }
1799 :
1800 9 : bool GstGenericPlayer::getVolume(double &currentVolume)
1801 : {
1802 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1803 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1804 9 : if (!m_context.pipeline)
1805 : {
1806 0 : return false;
1807 : }
1808 :
1809 : // NOTE: No gstreamer documentation for "fade-volume" could be found at the time this code was written.
1810 : // Therefore the author performed several tests on a supported platform (Flex2) to determine the behaviour of this property.
1811 : // The code has been written to be backwards compatible with platforms that don't have this property.
1812 : // The observed behaviour was:
1813 : // - if the returned fade volume is negative then audio-fade is not active. In this case the usual technique
1814 : // to find volume in the pipeline works and is used.
1815 : // - if the returned fade volume is zero or positive then audio-fade is active. In this case the fade volume
1816 : // directly gives the current volume level (0=min to 100=max) and the pipeline's current volume level is
1817 : // meaningless and doesn't contribute. A standalone sketch of this mapping follows this function.
1818 9 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1819 11 : if (m_context.audioFadeEnabled && sink &&
1820 2 : m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "fade-volume"))
1821 : {
1822 2 : gint fadeVolume{-100};
1823 2 : m_glibWrapper->gObjectGet(sink, "fade-volume", &fadeVolume, nullptr);
1824 2 : if (fadeVolume < 0)
1825 : {
1826 1 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1827 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1828 1 : RIALTO_SERVER_LOG_INFO("Fade volume is negative, using volume from pipeline: %f", currentVolume);
1829 : }
1830 : else
1831 : {
1832 1 : currentVolume = static_cast<double>(fadeVolume) / 100.0;
1833 1 : RIALTO_SERVER_LOG_INFO("Fade volume is supported: %f", currentVolume);
1834 : }
1835 : }
1836 : else
1837 : {
1838 7 : currentVolume = m_gstWrapper->gstStreamVolumeGetVolume(GST_STREAM_VOLUME(m_context.pipeline),
1839 : GST_STREAM_VOLUME_FORMAT_LINEAR);
1840 7 : RIALTO_SERVER_LOG_INFO("Fade volume is not supported, using volume from pipeline: %f", currentVolume);
1841 : }
1842 :
1843 9 : if (sink)
1844 2 : m_gstWrapper->gstObjectUnref(sink);
1845 :
1846 9 : return true;
1847 : }
1848 :
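// A minimal sketch, assuming only the fade-volume semantics observed in the NOTE above: how a raw
// "fade-volume" reading could be mapped to a linear volume. The helper name is hypothetical and the
// function is illustrative only; it is not part of the player's interface.
[[maybe_unused]] static double fadeVolumeToLinear(gint fadeVolume, double pipelineVolume)
{
    if (fadeVolume < 0)
    {
        // Audio fade inactive: the pipeline's stream volume is authoritative.
        return pipelineVolume;
    }
    // Audio fade active: the fade volume directly encodes the level as a percentage (0..100).
    return static_cast<double>(fadeVolume) / 100.0;
}
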
1849 1 : void GstGenericPlayer::setMute(const MediaSourceType &mediaSourceType, bool mute)
1850 : {
1851 1 : if (m_workerThread)
1852 : {
1853 1 : m_workerThread->enqueueTask(m_taskFactory->createSetMute(m_context, *this, mediaSourceType, mute));
1854 : }
1855 : }
1856 :
1857 5 : bool GstGenericPlayer::getMute(const MediaSourceType &mediaSourceType, bool &mute)
1858 : {
1859 : // We are on the main thread here, but m_context.pipeline can be used safely because it is modified only in the
1860 : // GstGenericPlayer constructor and destructor, which also run on the main thread.
1861 5 : if (mediaSourceType == MediaSourceType::SUBTITLE)
1862 : {
1863 2 : if (!m_context.subtitleSink)
1864 : {
1865 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1866 1 : return false;
1867 : }
1868 1 : gboolean muteValue{FALSE};
1869 1 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "mute", &muteValue, nullptr);
1870 1 : mute = muteValue;
1871 : }
1872 3 : else if (mediaSourceType == MediaSourceType::AUDIO)
1873 : {
1874 2 : if (!m_context.pipeline)
1875 : {
1876 1 : return false;
1877 : }
1878 1 : mute = m_gstWrapper->gstStreamVolumeGetMute(GST_STREAM_VOLUME(m_context.pipeline));
1879 : }
1880 : else
1881 : {
1882 1 : RIALTO_SERVER_LOG_ERROR("Getting mute for type %s unsupported", common::convertMediaSourceType(mediaSourceType));
1883 1 : return false;
1884 : }
1885 :
1886 2 : return true;
1887 : }
1888 :
1889 1 : bool GstGenericPlayer::isAsync(const MediaSourceType &mediaSourceType) const
1890 : {
1891 1 : GstElement *sink = getSink(mediaSourceType);
1892 1 : if (!sink)
1893 : {
1894 0 : RIALTO_SERVER_LOG_WARN("Sink not found for %s", common::convertMediaSourceType(mediaSourceType));
1895 0 : return true; // Our sinks are async by default
1896 : }
1897 1 : gboolean returnValue{TRUE};
1898 1 : m_glibWrapper->gObjectGet(sink, "async", &returnValue, nullptr);
1899 1 : m_gstWrapper->gstObjectUnref(sink);
1900 1 : return returnValue == TRUE;
1901 : }
1902 :
1903 1 : void GstGenericPlayer::setTextTrackIdentifier(const std::string &textTrackIdentifier)
1904 : {
1905 1 : if (m_workerThread)
1906 : {
1907 1 : m_workerThread->enqueueTask(m_taskFactory->createSetTextTrackIdentifier(m_context, textTrackIdentifier));
1908 : }
1909 : }
1910 :
1911 3 : bool GstGenericPlayer::getTextTrackIdentifier(std::string &textTrackIdentifier)
1912 : {
1913 3 : if (!m_context.subtitleSink)
1914 : {
1915 1 : RIALTO_SERVER_LOG_ERROR("There is no subtitle sink");
1916 1 : return false;
1917 : }
1918 :
1919 2 : gchar *identifier = nullptr;
1920 2 : m_glibWrapper->gObjectGet(m_context.subtitleSink, "text-track-identifier", &identifier, nullptr);
1921 :
1922 2 : if (identifier)
1923 : {
1924 1 : textTrackIdentifier = identifier;
1925 1 : m_glibWrapper->gFree(identifier);
1926 1 : return true;
1927 : }
1928 : else
1929 : {
1930 1 : RIALTO_SERVER_LOG_ERROR("Failed to get text track identifier");
1931 1 : return false;
1932 : }
1933 : }
1934 :
1935 1 : bool GstGenericPlayer::setLowLatency(bool lowLatency)
1936 : {
1937 1 : if (m_workerThread)
1938 : {
1939 1 : m_workerThread->enqueueTask(m_taskFactory->createSetLowLatency(m_context, *this, lowLatency));
1940 : }
1941 1 : return true;
1942 : }
1943 :
1944 1 : bool GstGenericPlayer::setSync(bool sync)
1945 : {
1946 1 : if (m_workerThread)
1947 : {
1948 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSync(m_context, *this, sync));
1949 : }
1950 1 : return true;
1951 : }
1952 :
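// getSync, getStreamSyncMode, getBufferingLimit and getUseBuffering below share a fallback pattern:
// read the live value from the pipeline element when it exists, otherwise return the queued pending
// value, and fail only when neither is available.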
1953 4 : bool GstGenericPlayer::getSync(bool &sync)
1954 : {
1955 4 : bool returnValue{false};
1956 4 : GstElement *sink{getSink(MediaSourceType::AUDIO)};
1957 4 : if (sink)
1958 : {
1959 2 : if (m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(sink), "sync"))
1960 : {
1961 1 : m_glibWrapper->gObjectGet(sink, "sync", &sync, nullptr);
1962 1 : returnValue = true;
1963 : }
1964 : else
1965 : {
1966 1 : RIALTO_SERVER_LOG_ERROR("Sync not supported in sink '%s'", GST_ELEMENT_NAME(sink));
1967 : }
1968 2 : m_gstWrapper->gstObjectUnref(sink);
1969 : }
1970 2 : else if (m_context.pendingSync.has_value())
1971 : {
1972 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
1973 1 : sync = m_context.pendingSync.value();
1974 1 : returnValue = true;
1975 : }
1976 : else
1977 : {
1978 : // We don't know the sink's default sync value, so return failure here
1979 1 : RIALTO_SERVER_LOG_WARN("No audio sink attached or queued value");
1980 : }
1981 :
1982 4 : return returnValue;
1983 : }
1984 :
1985 1 : bool GstGenericPlayer::setSyncOff(bool syncOff)
1986 : {
1987 1 : if (m_workerThread)
1988 : {
1989 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSyncOff(m_context, *this, syncOff));
1990 : }
1991 1 : return true;
1992 : }
1993 :
1994 1 : bool GstGenericPlayer::setStreamSyncMode(const MediaSourceType &mediaSourceType, int32_t streamSyncMode)
1995 : {
1996 1 : if (m_workerThread)
1997 : {
1998 2 : m_workerThread->enqueueTask(
1999 2 : m_taskFactory->createSetStreamSyncMode(m_context, *this, mediaSourceType, streamSyncMode));
2000 : }
2001 1 : return true;
2002 : }
2003 :
2004 5 : bool GstGenericPlayer::getStreamSyncMode(int32_t &streamSyncMode)
2005 : {
2006 5 : bool returnValue{false};
2007 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2008 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "stream-sync-mode"))
2009 : {
2010 2 : m_glibWrapper->gObjectGet(decoder, "stream-sync-mode", &streamSyncMode, nullptr);
2011 2 : returnValue = true;
2012 : }
2013 : else
2014 : {
2015 3 : std::unique_lock lock{m_context.propertyMutex};
2016 3 : if (m_context.pendingStreamSyncMode.find(MediaSourceType::AUDIO) != m_context.pendingStreamSyncMode.end())
2017 : {
2018 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2019 1 : streamSyncMode = m_context.pendingStreamSyncMode[MediaSourceType::AUDIO];
2020 1 : returnValue = true;
2021 : }
2022 : else
2023 : {
2024 2 : RIALTO_SERVER_LOG_ERROR("Stream sync mode not supported in decoder '%s'",
2025 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2026 : }
2027 3 : }
2028 :
2029 5 : if (decoder)
2030 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2031 :
2032 5 : return returnValue;
2033 : }
2034 :
2035 1 : void GstGenericPlayer::ping(std::unique_ptr<IHeartbeatHandler> &&heartbeatHandler)
2036 : {
2037 1 : if (m_workerThread)
2038 : {
2039 1 : m_workerThread->enqueueTask(m_taskFactory->createPing(std::move(heartbeatHandler)));
2040 : }
2041 : }
2042 :
2043 1 : void GstGenericPlayer::flush(const MediaSourceType &mediaSourceType, bool resetTime, bool &async)
2044 : {
2045 1 : if (m_workerThread)
2046 : {
2047 1 : async = isAsync(mediaSourceType);
2048 1 : m_flushWatcher->setFlushing(mediaSourceType, async);
2049 1 : m_workerThread->enqueueTask(m_taskFactory->createFlush(m_context, *this, mediaSourceType, resetTime, async));
2050 : }
2051 : }
2052 :
2053 1 : void GstGenericPlayer::setSourcePosition(const MediaSourceType &mediaSourceType, int64_t position, bool resetTime,
2054 : double appliedRate, uint64_t stopPosition)
2055 : {
2056 1 : if (m_workerThread)
2057 : {
2058 1 : m_workerThread->enqueueTask(m_taskFactory->createSetSourcePosition(m_context, mediaSourceType, position,
2059 : resetTime, appliedRate, stopPosition));
2060 : }
2061 : }
2062 :
2063 0 : void GstGenericPlayer::setSubtitleOffset(int64_t position)
2064 : {
2065 0 : if (m_workerThread)
2066 : {
2067 0 : m_workerThread->enqueueTask(m_taskFactory->createSetSubtitleOffset(m_context, position));
2068 : }
2069 : }
2070 :
2071 1 : void GstGenericPlayer::processAudioGap(int64_t position, uint32_t duration, int64_t discontinuityGap, bool audioAac)
2072 : {
2073 1 : if (m_workerThread)
2074 : {
2075 2 : m_workerThread->enqueueTask(
2076 2 : m_taskFactory->createProcessAudioGap(m_context, position, duration, discontinuityGap, audioAac));
2077 : }
2078 1 : }
2079 :
2080 1 : void GstGenericPlayer::setBufferingLimit(uint32_t limitBufferingMs)
2081 : {
2082 1 : if (m_workerThread)
2083 : {
2084 1 : m_workerThread->enqueueTask(m_taskFactory->createSetBufferingLimit(m_context, *this, limitBufferingMs));
2085 : }
2086 : }
2087 :
2088 5 : bool GstGenericPlayer::getBufferingLimit(uint32_t &limitBufferingMs)
2089 : {
2090 5 : bool returnValue{false};
2091 5 : GstElement *decoder = getDecoder(MediaSourceType::AUDIO);
2092 5 : if (decoder && m_glibWrapper->gObjectClassFindProperty(G_OBJECT_GET_CLASS(decoder), "limit-buffering-ms"))
2093 : {
2094 2 : m_glibWrapper->gObjectGet(decoder, "limit-buffering-ms", &limitBufferingMs, nullptr);
2095 2 : returnValue = true;
2096 : }
2097 : else
2098 : {
2099 3 : std::unique_lock lock{m_context.propertyMutex};
2100 3 : if (m_context.pendingBufferingLimit.has_value())
2101 : {
2102 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2103 1 : limitBufferingMs = m_context.pendingBufferingLimit.value();
2104 1 : returnValue = true;
2105 : }
2106 : else
2107 : {
2108 2 : RIALTO_SERVER_LOG_ERROR("Buffering limit not supported in decoder '%s'",
2109 : (decoder ? GST_ELEMENT_NAME(decoder) : "null"));
2110 : }
2111 3 : }
2112 :
2113 5 : if (decoder)
2114 3 : m_gstWrapper->gstObjectUnref(GST_OBJECT(decoder));
2115 :
2116 5 : return returnValue;
2117 : }
2118 :
2119 1 : void GstGenericPlayer::setUseBuffering(bool useBuffering)
2120 : {
2121 1 : if (m_workerThread)
2122 : {
2123 1 : m_workerThread->enqueueTask(m_taskFactory->createSetUseBuffering(m_context, *this, useBuffering));
2124 : }
2125 : }
2126 :
2127 3 : bool GstGenericPlayer::getUseBuffering(bool &useBuffering)
2128 : {
2129 3 : if (m_context.playbackGroup.m_curAudioDecodeBin)
2130 : {
2131 1 : m_glibWrapper->gObjectGet(m_context.playbackGroup.m_curAudioDecodeBin, "use-buffering", &useBuffering, nullptr);
2132 1 : return true;
2133 : }
2134 : else
2135 : {
2136 2 : std::unique_lock lock{m_context.propertyMutex};
2137 2 : if (m_context.pendingUseBuffering.has_value())
2138 : {
2139 1 : RIALTO_SERVER_LOG_DEBUG("Returning queued value");
2140 1 : useBuffering = m_context.pendingUseBuffering.value();
2141 1 : return true;
2142 : }
2143 2 : }
2144 1 : return false;
2145 : }
2146 :
2147 1 : void GstGenericPlayer::switchSource(const std::unique_ptr<IMediaPipeline::MediaSource> &mediaSource)
2148 : {
2149 1 : if (m_workerThread)
2150 : {
2151 1 : m_workerThread->enqueueTask(m_taskFactory->createSwitchSource(*this, mediaSource));
2152 : }
2153 : }
2154 :
2155 1 : void GstGenericPlayer::handleBusMessage(GstMessage *message)
2156 : {
2157 1 : m_workerThread->enqueueTask(m_taskFactory->createHandleBusMessage(m_context, *this, message, *m_flushWatcher));
2158 : }
2159 :
2160 1 : void GstGenericPlayer::updatePlaybackGroup(GstElement *typefind, const GstCaps *caps)
2161 : {
2162 1 : m_workerThread->enqueueTask(m_taskFactory->createUpdatePlaybackGroup(m_context, *this, typefind, caps));
2163 : }
2164 :
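// autovideosink and autoaudiosink instantiate the platform-specific sink as a child element at
// runtime. The handlers below cache that child so that properties can later be read from or set on
// the element that actually renders (see getSinkChildIfAutoVideoSink and getSinkChildIfAutoAudioSink
// further down).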
2165 3 : void GstGenericPlayer::addAutoVideoSinkChild(GObject *object)
2166 : {
2167 : // Only add children that are sinks
2168 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2169 : {
2170 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoVideoSink child sink");
2171 :
2172 2 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2173 : {
2174 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child is being overwritten");
2175 : }
2176 2 : m_context.autoVideoChildSink = GST_ELEMENT(object);
2177 : }
2178 3 : }
2179 :
2180 3 : void GstGenericPlayer::addAutoAudioSinkChild(GObject *object)
2181 : {
2182 : // Only add children that are sinks
2183 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2184 : {
2185 2 : RIALTO_SERVER_LOG_DEBUG("Store AutoAudioSink child sink");
2186 :
2187 2 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2188 : {
2189 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child is being overwritten");
2190 : }
2191 2 : m_context.autoAudioChildSink = GST_ELEMENT(object);
2192 : }
2193 3 : }
2194 :
2195 3 : void GstGenericPlayer::removeAutoVideoSinkChild(GObject *object)
2196 : {
2197 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2198 : {
2199 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoVideoSink child sink");
2200 :
2201 3 : if (m_context.autoVideoChildSink && m_context.autoVideoChildSink != GST_ELEMENT(object))
2202 : {
2203 1 : RIALTO_SERVER_LOG_MIL("AutoVideoSink child sink is not the same as the one stored");
2204 1 : return;
2205 : }
2206 :
2207 2 : m_context.autoVideoChildSink = nullptr;
2208 : }
2209 : }
2210 :
2211 3 : void GstGenericPlayer::removeAutoAudioSinkChild(GObject *object)
2212 : {
2213 3 : if (GST_OBJECT_FLAG_IS_SET(GST_ELEMENT(object), GST_ELEMENT_FLAG_SINK))
2214 : {
2215 3 : RIALTO_SERVER_LOG_DEBUG("Remove AutoAudioSink child sink");
2216 :
2217 3 : if (m_context.autoAudioChildSink && m_context.autoAudioChildSink != GST_ELEMENT(object))
2218 : {
2219 1 : RIALTO_SERVER_LOG_MIL("AutoAudioSink child sink is not the same as the one stored");
2220 1 : return;
2221 : }
2222 :
2223 2 : m_context.autoAudioChildSink = nullptr;
2224 : }
2225 : }
2226 :
2227 14 : GstElement *GstGenericPlayer::getSinkChildIfAutoVideoSink(GstElement *sink) const
2228 : {
2229 14 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2230 14 : if (!kTmpName)
2231 0 : return sink;
2232 :
2233 28 : const std::string kElementTypeName{kTmpName};
2234 14 : if (kElementTypeName == "GstAutoVideoSink")
2235 : {
2236 1 : if (!m_context.autoVideoChildSink)
2237 : {
2238 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autovideosink");
2239 : }
2240 : else
2241 : {
2242 1 : return m_context.autoVideoChildSink;
2243 : }
2244 : }
2245 13 : return sink;
2246 14 : }
2247 :
2248 11 : GstElement *GstGenericPlayer::getSinkChildIfAutoAudioSink(GstElement *sink) const
2249 : {
2250 11 : const gchar *kTmpName = m_glibWrapper->gTypeName(G_OBJECT_TYPE(sink));
2251 11 : if (!kTmpName)
2252 0 : return sink;
2253 :
2254 22 : const std::string kElementTypeName{kTmpName};
2255 11 : if (kElementTypeName == "GstAutoAudioSink")
2256 : {
2257 1 : if (!m_context.autoAudioChildSink)
2258 : {
2259 0 : RIALTO_SERVER_LOG_WARN("No child sink has been added to the autoaudiosink");
2260 : }
2261 : else
2262 : {
2263 1 : return m_context.autoAudioChildSink;
2264 : }
2265 : }
2266 10 : return sink;
2267 11 : }
2268 :
2269 212 : void GstGenericPlayer::setPlaybinFlags(bool enableAudio)
2270 : {
2271 212 : unsigned flags = getGstPlayFlag("video") | getGstPlayFlag("native-video") | getGstPlayFlag("text");
2272 :
2273 212 : if (enableAudio)
2274 : {
2275 212 : flags |= getGstPlayFlag("audio");
2276 212 : flags |= shouldEnableNativeAudio() ? getGstPlayFlag("native-audio") : 0;
2277 : }
2278 :
2279 212 : m_glibWrapper->gObjectSet(m_context.pipeline, "flags", flags, nullptr);
2280 : }
2281 :
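// A minimal sketch, assuming getGstPlayFlag (implemented elsewhere in this component) resolves
// playbin flags by nick: GStreamer registers the GstPlayFlags flags type, so a nick such as "audio"
// or "native-video" can be translated to its numeric value through GLib flags introspection. The
// helper name below is hypothetical.
[[maybe_unused]] static unsigned lookupPlayFlagByNick(const gchar *nick)
{
    unsigned result = 0;
    GType type = g_type_from_name("GstPlayFlags");
    GFlagsClass *flagsClass = type ? static_cast<GFlagsClass *>(g_type_class_ref(type)) : nullptr;
    if (flagsClass)
    {
        // Look the flag up by its nick, e.g. "audio", "video", "text", "native-video".
        const GFlagsValue *value = g_flags_get_value_by_nick(flagsClass, nick);
        if (value)
        {
            result = value->value;
        }
        g_type_class_unref(flagsClass);
    }
    return result;
}
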
2282 212 : bool GstGenericPlayer::shouldEnableNativeAudio()
2283 : {
2284 212 : GstElementFactory *factory = m_gstWrapper->gstElementFactoryFind("brcmaudiosink");
2285 212 : if (factory)
2286 : {
2287 1 : m_gstWrapper->gstObjectUnref(GST_OBJECT(factory));
2288 1 : return true;
2289 : }
2290 211 : return false;
2291 : }
2292 :
2293 : }; // namespace firebolt::rialto::server