GStreamer pipeline won't play the same video consecutively

Hello,

I’m writing a GStreamer pipeline using PyGST and Gst.parse_launch. The pipeline plays 4K video and fades videos in and out at runtime.

My issue is that the pipeline won’t play the same video twice in a row. It plays different videos one after another just fine, and the fade functionality works well.

Here is a description of the problem:

The pipeline works the first time with an example video such as video_1.mp4. When I play the same video again, it gets stuck on the first frame, stays on that frame, and the process exits after a few seconds.

If I play video_1.mp4 and then a different video such as video_2.mp4, the pipeline works with no issues. If I then play video_1.mp4 again, it also plays all the way through.

video_1.mp4 is 10 seconds long. If I stop it at the 5-second mark and play it again, the first frame is displayed, the pipeline waits on that frame for 5 seconds, and then it continues decoding from the point where the previous run left off.

My suspicion is that the buffer running time is not being reset to 0 when the same video plays again. I’m not sure why this happens: I set the pipeline to NULL, and I have also tried seeking to position 0 and flushing the elements in the pipeline after the video finishes to reset the clock time back to 0. Perhaps the syntax or the way I’m implementing it is incorrect.
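(As a reference point for anyone debugging the same thing: running time in GStreamer is clock time minus base time, so a rough debug sketch like the one below, called right after restarting playback, should show whether it really starts back at 0. It is not part of my class, just a quick check.)

def check_running_time(pipeline):
    # Rough debug sketch: running time = clock time - base time.
    # Assumes `from gi.repository import Gst` as in the class below.
    # If this prints something well above 0 right after the same video is
    # restarted, the old base time / segment is still being used.
    clock = pipeline.get_clock()
    if clock is None:
        return
    running = clock.get_time() - pipeline.get_base_time()
    print("running time: {:.3f} s".format(running / Gst.SECOND))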

The code below creates the pipeline:

import gi
import time
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
gi.require_version('GstController', '1.0')
from gi.repository import GObject, Gst, GstPbutils, GstController
from player_thread import PlayerThread
from threading import Timer
import logging
import time

class VideoPlayer(): 
    def __init__(self):
        Gst.init(None)
        GObject.threads_init()

        self.pipeline = Gst.parse_launch("""
            nvcompositor name=comp sink_0::alpha=1 ! video/x-raw(memory:NVMM),format=RGBA !
            nvvidconv ! video/x-raw(memory:NVMM),format=NV12 ! autovideosink name=sinky
            uridecodebin name=vidsrc !
            nvvidconv name=pipeconv ! video/x-raw(memory:NVMM),format=RGBA,pixel-aspect-ratio=1/1 ! queue ! comp.sink_0
        """)

        self.loop = GObject.MainLoop()
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.bus_call, self.loop)
        self.bus.connect("message::error", self.bus_call, self.loop)
        self.sink  = self.pipeline.get_by_name('sinky')
        self.convert = self.pipeline.get_by_name('pipeconv')
        self.source = self.pipeline.get_by_name('vidsrc')
        self.compositor = self.pipeline.get_by_name('comp')
        self.source.connect("pad-added", self.on_pad_added)
        self.running = False
        self.pipeline.set_state(Gst.State.NULL)
        self.loop.run()

    def get_alpha_controller(self, incoming_pad):
        self.pad = incoming_pad
        self.control_source = GstController.InterpolationControlSource()
        self.control_source.set_property('mode', GstController.InterpolationMode.LINEAR)
        self.control_bind = GstController.DirectControlBinding.new(self.pad, 'alpha', self.control_source)
        self.pad.add_control_binding(self.control_bind)
        return self.control_source

    def fade_video_in(self):
        self.compositor_sink_pad = self.compositor.get_static_pad('sink_0')
        self.control_source = self.get_alpha_controller(self.compositor_sink_pad)
        self.control_source.set(0*Gst.SECOND, 0)
        self.control_source.set(2*Gst.SECOND, 1)

    def fade_video_out(self):
        self.pos = self.pipeline.query_position(Gst.Format.TIME).cur
        self.control_source.set(self.pos, 1)
        self.control_source.set(self.pos + 1*Gst.SECOND, 0)

    def on_pad_added(self, src, new_pad):
        print(
            "Received new pad '{0:s}' from '{1:s}'".format(
                new_pad.get_name(),
                src.get_name()))
        
        new_pad_caps = new_pad.get_current_caps()
        new_pad_struct = new_pad_caps.get_structure(0)
        new_pad_type = new_pad_struct.get_name()

        if new_pad_type.startswith("video/x-raw"):
            sink_pad = self.convert.get_static_pad("sink")
        else:
            print(
                "It has type '{0:s}' which is not raw audio/video. Ignoring.".format(new_pad_type))
            return

        # if our converter is already linked, we have nothing to do here
        # if(sink_pad.is_linked()):
        #     print("We are already linked. Ignoring.")
        #     return

        # attempt the link
        ret = new_pad.link(sink_pad)

        if ret != Gst.PadLinkReturn.OK:
            print("Type is '{0:s}' but link failed".format(new_pad_type))
        else:
            print("Link succeeded (type '{0:s}')".format(new_pad_type))

        return

    def play_video(self, video_url, scene_name):
        self.compositor.props.background = 1
        self.running = True
        self.scene_name = scene_name 
        self.video_url = video_url  
        print(self.video_url)
        self.source.props.uri = self.video_url
        self.sink.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH, 0 * Gst.SECOND)
        self.pipeline.set_state(Gst.State.READY)
        self.pipeline.get_state(Gst.CLOCK_TIME_NONE)
        self.pipeline.set_state(Gst.State.PAUSED)
        self.pipeline.set_state(Gst.State.PLAYING)

    def stop_video(self):
        self.running = False
        self.pipeline.set_state(Gst.State.NULL)
        self.pipeline.set_state(Gst.State.READY)
        self.pipeline.seek_simple(Gst.Format.TIME, Gst.SeekFlags.FLUSH | Gst.SeekFlags.KEY_UNIT, 0 * Gst.SECOND)
        self.source.props.uri = ""
 

    def bus_call(self, bus, message, loop):
        t = message.type
        if t == Gst.MessageType.EOS:
            self.stop_video()

        elif t == Gst.MessageType.ERROR:
            print(message.parse_error())
        elif message.type == Gst.MessageType.SEGMENT_DONE:
            # self.fade_video_out()
            pass
        else:
            # should not get here
            print("ERROR: Unexpected message received")
        return True

    def exit(self):
        self.loop.quit()

Attaching GST debug logs below.

What the logs look like when the pipeline is stuck:

0:04:54.215492099  1756   0x7f5c0071e0 WARN              aggregator gstaggregator.c:1717:gst_aggregator_query_latency_unlocked:<comp> Latency query failed
0:04:54.216959847  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
0:04:54.217743982  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
sending message {"action": "STATE_CHANGE", "body": {"videoActiveVideo": "test"}}
0:04:54.222492287  1756     0x106f2ed0 WARN                 qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type pasp
0:04:54.222566247  1756     0x106f2ed0 WARN                 qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux10> failed to find fragment defaults for stream 1
Opening in BLOCKING MODE 
0:04:54.268433832  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder10:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:04:54.268509719  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:04:54.268563887  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:04:54.268623108  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
NvMMLiteOpen : Block : BlockType = 261 
NVMEDIA: Reading vendor.tegra.display-size : status: 6 
NvMMLiteBlockCreate : Block : BlockType = 261 
0:04:54.374568554  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder10:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:04:54.374737725  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:04:54.374902417  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:04:54.375223936  1756     0x106feca0 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
Received new pad 'src_15' from 'vidsrc'
Link succeeded (type 'video/x-raw')
0:04:54.385219204  1756     0x106feca0 WARN            v4l2videodec gstv4l2videodec.c:1673:gst_v4l2_video_dec_decide_allocation:<nvv4l2decoder10> Duration invalid, not setting latency
0:04:54.385676977  1756     0x106feca0 WARN          v4l2bufferpool gstv4l2bufferpool.c:1065:gst_v4l2_buffer_pool_start:<nvv4l2decoder10:pool:src> Uncertain or not enough buffers, enabling copy threshold
0:04:54.392161109  1756     0x106f6630 WARN          v4l2bufferpool gstv4l2bufferpool.c:1512:gst_v4l2_buffer_pool_dqbuf:<nvv4l2decoder10:pool:src> Driver should never set v4l2_buffer.field to ANY
0:04:54.394550757  1756   0x7f5c0071e0 FIXME               basesink gstbasesink.c:3145:gst_base_sink_default_event:<sinky-actual-sink-nvoverlay> stream-start event without group-id. Consider implementing group-id handling in the upstream elements
0:04:54.400374455  1756   0x7f5c0071e0 WARN            nvcompositor gstnvcompositor.c:980:gst_nvcompositor_negotiated_caps:<comp> Release old pool

0:05:51.707439047  1756   0x7f7800e540 WARN              aggregator gstaggregator.c:1717:gst_aggregator_query_latency_unlocked:<comp> Latency query failed
0:05:51.709119977  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
0:05:51.709991980  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
sending message {"action": "STATE_CHANGE", "body": {"videoActiveVideo": "test"}}
0:05:51.714088392  1756     0x106e9630 WARN                 qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type pasp
0:05:51.714195686  1756     0x106e9630 WARN                 qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux11> failed to find fragment defaults for stream 1
Opening in BLOCKING MODE 
0:05:51.759452369  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder11:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:05:51.759527839  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
0:05:51.759581956  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
0:05:51.759638728  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
NvMMLiteOpen : Block : BlockType = 261 
NVMEDIA: Reading vendor.tegra.display-size : status: 6 
NvMMLiteBlockCreate : Block : BlockType = 261 
0:05:51.865431565  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder11:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:05:51.865703395  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
0:05:51.865971579  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
0:05:51.866240232  1756     0x106fae80 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f64022090 Failed to determine interlace mode
Received new pad 'src_16' from 'vidsrc'
Link succeeded (type 'video/x-raw')
0:05:51.876923175  1756     0x106fae80 WARN            v4l2videodec gstv4l2videodec.c:1673:gst_v4l2_video_dec_decide_allocation:<nvv4l2decoder11> Duration invalid, not setting latency
0:05:51.877534077  1756     0x106fae80 WARN          v4l2bufferpool gstv4l2bufferpool.c:1065:gst_v4l2_buffer_pool_start:<nvv4l2decoder11:pool:src> Uncertain or not enough buffers, enabling copy threshold
0:05:51.883864038  1756   0x7f5c007850 WARN          v4l2bufferpool gstv4l2bufferpool.c:1512:gst_v4l2_buffer_pool_dqbuf:<nvv4l2decoder11:pool:src> Driver should never set v4l2_buffer.field to ANY
0:05:51.891684769  1756   0x7f7800e540 FIXME               basesink gstbasesink.c:3145:gst_base_sink_default_event:<sinky-actual-sink-nvoverlay> stream-start event without group-id. Consider implementing group-id handling in the upstream elements
0:05:51.893670863  1756   0x7f7800e540 WARN            nvcompositor gstnvcompositor.c:980:gst_nvcompositor_negotiated_caps:<comp> Release old pool


Below are the logs when the pipeline resets with a new video (pipeline working correctly)

0:06:21.907924995  1756   0x7f580cc320 WARN              aggregator gstaggregator.c:1717:gst_aggregator_query_latency_unlocked:<comp> Latency query failed
0:06:21.909268156  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
0:06:21.910099324  1756   0x7f70007460 WARN                 basesrc gstbasesrc.c:3583:gst_base_src_start_complete:<source> pad not activated yet
sending message {"action": "STATE_CHANGE", "body": {"videoActiveVideo": "test1"}}
0:06:21.913367433  1756   0x7f7800e720 WARN                 qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type gsst
0:06:21.914455326  1756   0x7f7800e720 WARN                 qtdemux qtdemux_types.c:233:qtdemux_type_get: unknown QuickTime node type gstd
0:06:21.914529130  1756   0x7f7800e720 WARN                 qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux12> failed to find fragment defaults for stream 1
0:06:21.914677884  1756   0x7f7800e720 WARN                 qtdemux qtdemux.c:3031:qtdemux_parse_trex:<qtdemux12> failed to find fragment defaults for stream 2
Opening in BLOCKING MODE 
0:06:21.964505314  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder12:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:06:21.964574795  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:06:21.964636619  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:06:21.964687871  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
NvMMLiteOpen : Block : BlockType = 261 
NVMEDIA: Reading vendor.tegra.display-size : status: 6 
NvMMLiteBlockCreate : Block : BlockType = 261 
0:06:22.070629669  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:4447:gst_v4l2_object_probe_caps:<nvv4l2decoder12:src> Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:06:22.070809622  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:06:22.071008898  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
0:06:22.071295416  1756     0x106ee320 WARN                    v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x7f5802e8e0 Failed to determine interlace mode
Received new pad 'src_17' from 'vidsrc'
Link succeeded (type 'video/x-raw')
Received new pad 'src_18' from 'vidsrc'
It has type 'audio/x-raw' which is not raw audio/video. Ignoring.
0:06:22.080439932  1756     0x106ee320 WARN            v4l2videodec gstv4l2videodec.c:1673:gst_v4l2_video_dec_decide_allocation:<nvv4l2decoder12> Duration invalid, not setting latency
0:06:22.080866610  1756     0x106ee320 WARN          v4l2bufferpool gstv4l2bufferpool.c:1065:gst_v4l2_buffer_pool_start:<nvv4l2decoder12:pool:src> Uncertain or not enough buffers, enabling copy threshold
0:06:22.085282770  1756   0x7f5c0078f0 WARN          v4l2bufferpool gstv4l2bufferpool.c:1512:gst_v4l2_buffer_pool_dqbuf:<nvv4l2decoder12:pool:src> Driver should never set v4l2_buffer.field to ANY
0:06:22.091041154  1756   0x7f580cc320 FIXME               basesink gstbasesink.c:3145:gst_base_sink_default_event:<sinky-actual-sink-nvoverlay> stream-start event without group-id. Consider implementing group-id handling in the upstream elements
0:06:22.098388747  1756   0x7f580cc320 WARN            nvcompositor gstnvcompositor.c:980:gst_nvcompositor_negotiated_caps:<comp> Release old pool
0:06:22.108516102  1756   0x7f580cc320 ERROR                    omx gstomx.c:256:gst_omx_component_handle_messages:<sinky-actual-sink-nvoverlay> yuv420 port 0 was not flushing

Hi,
In this use case the timestamps can be wrong. Not sure if it helps, but please try setting sync=false on the sink. This will disable the synchronization mechanism in GStreamer.

Hi thanks for your response.

I added sync=false to the sink element as shown below.

        self.pipeline = Gst.parse_launch("""
            nvcompositor name=comp sink_0::alpha=1 ! video/x-raw(memory:NVMM),format=RGBA !
            nvvidconv ! video/x-raw(memory:NVMM),format=NV12 ! autovideosink name=sinky sync=false
            uridecodebin name=vidsrc !
            nvvidconv name=pipeconv ! video/x-raw(memory:NVMM),format=RGBA,pixel-aspect-ratio=1/1 ! queue ! comp.sink_0
        """)

It did not seem to change anything, except that the video plays faster and the process exits faster when the error occurs.

Hi,
Please check whether you can run it like this sample:
Nvv4l2decoder sometimes fails to negotiate with downstream after several pipeline re-launches - #16 by DaneLLL

Wait for EoS and then re-initialize the pipeline.

Hi Dane, thanks for your response.
I ran the pipeline in a loop as shown below and it worked as expected. Video and audio playback were good.

#!/usr/bin/env python3

import gi
import time
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject, GLib

pipeline = None
bus = None
message = None

# initialize GStreamer
Gst.init(None)

for i in range(1, 5566):
    print("loop =",i," ")
    # build the pipeline

    pipeline = Gst.parse_launch ("""
            nvcompositor name=comp sink_0::alpha=1 ! video/x-raw(memory:NVMM),format=RGBA ! 
            nvvidconv ! video/x-raw(memory:NVMM), format=NV12 ! autovideosink 
            uridecodebin uri=file:///home/videos/test.mp4 name=src ! nvvidconv ! video/x-raw(memory:NVMM),format=RGBA ! queue ! comp.sink_0 
            src. ! audioconvert ! autoaudiosink
    """)

    # start playing
    print("Switch to PLAYING state")
    pipeline.set_state(Gst.State.PLAYING)

    time.sleep(8)
    print("Send EoS")
    Gst.Element.send_event(pipeline, Gst.Event.new_eos())
    # wait until EOS or error
    bus = pipeline.get_bus()
    msg = bus.timed_pop_filtered(
        Gst.CLOCK_TIME_NONE, Gst.MessageType.ERROR | Gst.MessageType.EOS)

    # free resources
    print("Switch to NULL state")
    pipeline.set_state(Gst.State.NULL)
    time.sleep(2)

As a side note, I continued developing this pipeline and discovered that the video and audio pads were not being flushed after a video ended. In other words, my on_pad_added callback is the problem.
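Before settling on the workaround below, one idea I have not fully verified is to explicitly flush the converter sink pads when a video stops, so leftover data from the previous run gets dropped. A rough sketch of such a method (flush_converters is just a hypothetical helper; video_convert and audio_convert are the converter elements from the class below):

    def flush_converters(self):
        # Untested sketch: push a flush through the converter sink pads so
        # buffers left over from the previous video are discarded.
        # new_flush_stop(True) also asks downstream to reset the running time.
        for element in (self.video_convert, self.audio_convert):
            pad = element.get_static_pad("sink")
            if pad is not None:
                pad.send_event(Gst.Event.new_flush_start())
                pad.send_event(Gst.Event.new_flush_stop(True))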

The way I got around this was to create a black video (a single frame, 0.2 seconds long) that plays before any real video. This effectively clears out the previous video/audio pads and lets me play any video with the pipeline once again. Although it is not ideal and adds roughly a half-second delay, it solved my problem of not being able to play videos consecutively.
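In case anyone wants to reproduce the workaround, the black clip itself can be generated with a short GStreamer pipeline. Something along these lines should work (just a sketch; it assumes x264enc is available, any H.264 encoder element would do, and the resolution and framerate are arbitrary):

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)

# ~0.2 s of black frames (6 buffers at 30 fps), H.264-encoded into an MP4.
# x264enc is assumed to be installed; swap in another H.264 encoder if needed.
pipe = Gst.parse_launch(
    "videotestsrc pattern=black num-buffers=6 ! "
    "video/x-raw,width=1920,height=1080,framerate=30/1 ! "
    "x264enc ! mp4mux ! filesink location=black_video.mp4"
)
pipe.set_state(Gst.State.PLAYING)
# Wait for EOS so mp4mux finalises the file before tearing down.
pipe.get_bus().timed_pop_filtered(
    Gst.CLOCK_TIME_NONE, Gst.MessageType.EOS | Gst.MessageType.ERROR)
pipe.set_state(Gst.State.NULL)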

I’m leaving the code below in case anyone is interested.

import gi
import time
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
gi.require_version('GstController', '1.0')
from gi.repository import GObject, Gst, GstPbutils, GstController
from player_thread import PlayerThread
from threading import Timer
import logging
import time


class VideoPlayer(): 
    def __init__(self):
        Gst.init(None)
        GObject.threads_init()

        # Pipeline overview 
        # mp4 file → Video decoded → Video converted to rgba → dumped into the compositor element (where we can control alpha)
        # compositor element → converted to nicer colorspace → dumped into the Jetson device monitor

        self.pipeline = Gst.parse_launch ("""
            nvcompositor name=comp sink_0::alpha=1 ! video/x-raw(memory:NVMM),format=RGBA ! 
            nvvidconv ! video/x-raw(memory:NVMM), format=NV12 ! autovideosink name=sinky 
            uridecodebin name=vidsrc ! nvvidconv name=videoconvert ! video/x-raw(memory:NVMM),format=RGBA ! queue ! comp.sink_0 
            vidsrc. ! audioconvert name=audioconvert ! autoaudiosink
         """)

        self.loop = GObject.MainLoop()
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.bus_call, self.loop)
        self.bus.connect("message::error", self.bus_call, self.loop)
        self.discoverer = GstPbutils.Discoverer()

        self.sink  = self.pipeline.get_by_name('sinky')
        self.video_convert = self.pipeline.get_by_name('videoconvert')
        self.audio_convert  = self.pipeline.get_by_name('audioconvert')
        self.source = self.pipeline.get_by_name('vidsrc')
        self.compositor = self.pipeline.get_by_name('comp')

        self.source.connect("pad-added", self.on_pad_added)
        self.running = False
        self.play_thread = PlayerThread(self.pipeline, self.loop)
        self.play_thread.start()

    def get_alpha_controller(self, incoming_pad):
        self.pad = incoming_pad
        self.control_source = GstController.InterpolationControlSource()
        self.control_source.set_property('mode', GstController.InterpolationMode.LINEAR)
        self.control_bind = GstController.DirectControlBinding.new(self.pad, 'alpha', self.control_source)
        self.pad.add_control_binding(self.control_bind)
        return self.control_source

    def fade_video_in(self):
        self.compositor_sink_pad = self.compositor.get_static_pad('sink_0')
        self.control_source = self.get_alpha_controller(self.compositor_sink_pad)
        self.control_source.set(0*Gst.SECOND, 0)
        self.control_source.set(2*Gst.SECOND, 1)

    def fade_video_out(self):
        self.pos = self.pipeline.query_position(Gst.Format.TIME).cur
        self.control_source.set(self.pos, 1)
        self.control_source.set(self.pos + 1*Gst.SECOND, 0)

    def on_pad_added(self, src, new_pad):
        print(
            "Received new pad '{0:s}' from '{1:s}'".format(
                new_pad.get_name(),
                src.get_name()))
        
        new_pad_caps = new_pad.get_current_caps()
        new_pad_struct = new_pad_caps.get_structure(0)
        new_pad_type = new_pad_struct.get_name()

        if new_pad_type.startswith("audio/x-raw"):
            sink_pad = self.audio_convert.get_static_pad("sink")
        elif new_pad_type.startswith("video/x-raw"):
            sink_pad = self.video_convert.get_static_pad("sink")
        else:
            print(
                "It has type '{0:s}' which is not raw audio/video. Ignoring.".format(new_pad_type))
            return

        # if our converter is already linked, we have nothing to do here
        # if(sink_pad.is_linked()):
        #     print("We are already linked. Ignoring.")
        #     return

        # attempt the link
        ret = new_pad.link(sink_pad)
        if not ret == Gst.PadLinkReturn.OK:
            print("Type is '{0:s}}' but link failed".format(new_pad_type))
        else:
            print("Link succeeded (type '{0:s}')".format(new_pad_type))

        return


    def play_video(self, video_url, scene_name):
        # play the black clip for ~half a second to flush out the previous video's pads
        self.source.props.uri = 'file:///home/videos/black_video.mp4'
        self.pipeline.set_state(Gst.State.PLAYING)
        time.sleep(0.6)

        self.compositor.props.background = 1
        self.running = True
        self.scene_name = scene_name 
        self.video_url = video_url  
        self.source.props.uri = self.video_url
        self.play_thread.start_playback()
        self.compositor_sink_pad = self.compositor.get_static_pad('sink_0')
        self.compositor_sink_pad.set_property("alpha", 1)
        self.fade_video_in()

    def stop_video(self):
        self.length = None
        self.running = False
        self.play_thread.stop_playback()

    def bus_call(self, bus, message, loop):
        logging.info("event received in bus_callback")
        t = message.type
        if t == Gst.MessageType.EOS:
            logging.info('end of video stream event')
            self.stop_video()

        elif t == Gst.MessageType.ERROR:
            print(message.parse_error())
        else:
            # should not get here
            print("ERROR: Unexpected message received")
        return True
