From: Einar Jørgen Haraldseid
Date: Thu, 19 Jun 2014 16:42:36 +0000 (+0200)
Subject: Of course you can combine example 2 and 3 and get audio AND video
X-Git-Url: https://git.slaskete.net/python-gstreamer-examples/commitdiff_plain/c5383e2b27c8b6da301037eaba28839182331414?hp=0b1f23d4eec1048a6680f0235f466a26888367ca

Of course you can combine example 2 and 3 and get audio AND video
---

diff --git a/example2+3.py b/example2+3.py
new file mode 100755
index 0000000..d203020
--- /dev/null
+++ b/example2+3.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# Let's see if we can get a viewport AND audio working, combining ex. 2 and 3
+
+# GdkX11 to get access to xid, GstVideo to get access to set_window_handle
+from gi.repository import Gtk, Gst, GdkX11, GstVideo
+import signal
+
+class Main:
+    def __init__(self):
+
+        # Create gui bits and bobs
+
+        self.mainwindow = Gtk.Builder()
+        self.mainwindow.add_from_file("example3.glade")
+
+        signals = {
+            "on_play_clicked" : self.OnPlay,
+            "on_stop_clicked" : self.OnStop,
+            "on_quit_clicked" : self.OnQuit,
+        }
+
+        self.mainwindow.connect_signals(signals)
+
+        # Create GStreamer bits and bobs
+
+        # Initiate the pipeline
+        Gst.init(None)
+        self.pipeline = Gst.Pipeline("mypipeline")
+
+        # Add a videotestsrc element to the pipeline, set it to pattern "snow."
+        self.videotestsrc = Gst.ElementFactory.make("videotestsrc", "videosource")
+        self.videotestsrc.set_property("pattern", "snow")
+        self.pipeline.add(self.videotestsrc)
+
+        # Add a capsfilter that we want to apply to our videotestsrc
+        self.videotestcaps = Gst.ElementFactory.make("capsfilter", "videotestcaps")
+        self.videotestcaps.set_property("caps", Gst.Caps.from_string("video/x-raw,width=640,height=480"))
+        self.pipeline.add(self.videotestcaps)
+
+        # Link the capsfilter to the videotestsrc
+        self.videotestsrc.link(self.videotestcaps)
+
+        # Add a videosink element to the pipeline
+        self.videosink = Gst.ElementFactory.make("autovideosink", "videosink")
+        self.pipeline.add(self.videosink)
+
+        # Link the already linked videotestcaps to the sink
+        self.videotestcaps.link(self.videosink)
+
+        # Add an audiotestsrc element to the pipeline
+        self.audiotestsrc = Gst.ElementFactory.make("audiotestsrc", "audio")
+        self.audiotestsrc.set_property("freq", 800)
+        self.pipeline.add(self.audiotestsrc)
+
+        # Add a pulsesink element to the pipeline
+        self.pulsesink = Gst.ElementFactory.make("pulsesink", "sink")
+        self.pipeline.add(self.pulsesink)
+
+        # Link the two elements together
+        self.audiotestsrc.link(self.pulsesink)
+
+        # Set up a bus to our pipeline to get notified when the video is ready
+        self.bus = self.pipeline.get_bus()
+        self.bus.enable_sync_message_emission()
+        self.bus.connect("sync-message::element", self.OnSyncElement)
+
+        # Summon the window and connect the window's close button to quit
+        self.window = self.mainwindow.get_object("mainwindow")
+        self.window.connect("delete-event", Gtk.main_quit)
+        self.window.show_all()
+
+        # Get window ID of the viewport widget from the GUI
+        self.win_id = self.mainwindow.get_object("viewport").get_window().get_xid()
+
+
+    # When we get a message that video is ready to display, set the
+    # correct window id to hook it to our viewport
+    def OnSyncElement(self, bus, message):
+        if message.get_structure().get_name() == "prepare-window-handle":
+            print "prepare-window-handle"
+            message.src.set_window_handle(self.win_id)
+
+    def OnPlay(self, widget):
+        print "play"
+        self.pipeline.set_state(Gst.State.PLAYING)
+
+    def OnStop(self, widget):
+        print "stop"
+        self.pipeline.set_state(Gst.State.READY)
+
+    def OnQuit(self, widget):
+        print "quit"
+        Gtk.main_quit()
+
+# Workaround to get Ctrl+C to terminate from command line
+# ref: https://bugzilla.gnome.org/show_bug.cgi?id=622084#c12
+signal.signal(signal.SIGINT, signal.SIG_DFL)
+
+start = Main()
+Gtk.main()
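
The script above expects the example3.glade UI file from example 3 in the same directory, and it uses Python 2
print statements, so it targets a Python 2 interpreter. On newer PyGObject installs the bare gi.repository
imports can trigger version warnings; a minimal sketch of a pinned import block, assuming the GTK 3.0 and
GStreamer 1.0 typelibs are installed (not part of the original commit):

    import gi
    # Pin the introspection namespaces before importing them
    gi.require_version("Gtk", "3.0")
    gi.require_version("Gst", "1.0")
    gi.require_version("GdkX11", "3.0")
    gi.require_version("GstVideo", "1.0")
    from gi.repository import Gtk, Gst, GdkX11, GstVideo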