From c5cb0627fd129da4ee95de541e5a482e8d222e49 Mon Sep 17 00:00:00 2001 From: =?utf8?q?David=20Test=C3=A9?= Date: Mon, 29 Feb 2016 16:18:40 +0100 Subject: [PATCH] Main module simplified. --- stream_2016/libre-streamer.py | 315 +++++++--------------------------- 1 file changed, 62 insertions(+), 253 deletions(-) diff --git a/stream_2016/libre-streamer.py b/stream_2016/libre-streamer.py index 25da323..7fbd2a1 100755 --- a/stream_2016/libre-streamer.py +++ b/stream_2016/libre-streamer.py @@ -20,16 +20,16 @@ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # TODO list: # ---------- -# - Add a form to fill before start streaming (conf title, name, etc...) +# - Display the Gst element 'videotestsrc', in case of failure of the pipeline # - Add a checkbox to enable/disable options (storing/streaming - storing only - stream only - etc...) # - Add a function to get the ip address of the camera automatically (see github.com/paulmilliken) -# - Create a module for the pipeline construction section to clarify the code -# - Implement 2016 edition pipeline, see file 'gstream_pipeline_by_quidam' # - Create a module for the network configuration (fan/cpu, ifconfig, stream server,etc) # - Generate a log file during runtime. (e.g. 
this will let you know if the network configuration # and the pipeline construction went well (or not)) # - Add an input source choice for the user (camera on IP or webcam) +# - Add a time counter # - Add a VU-meter to check if audio feed is emitting signal +# - Add a 'CPU load' widget # - Add the FSF logo (need to do some pixel art) as an application icon # - Add the FSF logo inside the streamer use the 'textoverlay' method in ElementFactory.make() # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -48,6 +48,7 @@ __status__ = 'Prototype' import sys +from time import time, gmtime, strftime import gi gi.require_version('Gtk', '3.0') @@ -56,31 +57,66 @@ gi.require_version('Gst', '1.0') from gi.repository import Gst from gi.repository import GdkX11 from gi.repository import GstVideo +from gi.repository import GObject +import gstconf + +formatted_date = strftime('%Y_%m_%d', gmtime()) +metadata = {'speaker_name':'NC', + 'session_title':'NC', + 'organisation':'NC',} +start_time = 0 class Streamgui(object): def __init__(self): - self.multimedia_file="" - self.pipel = self.constructpipeline() # Create the GUI self.win = Gtk.Window() self.win.set_title("Libre-Streamer") self.win.connect("delete_event", lambda w,e: Gtk.main_quit()) vbox = Gtk.VBox(False, 0) + vbox_labels = Gtk.VBox(False, 0) + vbox_entries = Gtk.VBox(False, 0) + vbox_streaminfo = Gtk.VBox(False, 0) + vbox_cpuinfo = Gtk.VBox(False, 0) hbox = Gtk.HBox(False, 0) - self.load_file = Gtk.FileChooserButton("Choose Audio File") - self.stream_button = Gtk.Button("Stream") + hbox_time = Gtk.HBox(False, 0) + self.videowidget = Gtk.DrawingArea() self.videowidget.set_size_request(600, 400) - self.load_file.connect("selection-changed", self.on_file_selected) - self.stream_button.connect("clicked", self.on_stream_clicked) - hbox.pack_start(self.stream_button, False, True, 0) - vbox.pack_start(self.load_file, False, True, 0) + self.baseinfo_label = Gtk.Label('Base info: ') + 
self.baseinfo_entry_label = Gtk.Label('LP_' + formatted_date) + self.speakerinfo_label = Gtk.Label('Speaker name: ') + self.speakerinfo_entry = Gtk.Entry() + self.sessioninfo_label = Gtk.Label('Session name: ') + self.sessioninfo_entry = Gtk.Entry() + self.organisationinfo_label = Gtk.Label('Organisation name: ') + self.organisationinfo_entry = Gtk.Entry() + + self.stream_button = Gtk.Button("Stream") + self.stream_button.connect("clicked", self.on_stream_clicked) + self.streamtime_label = Gtk.Label('Time elapsed ') + self.streamtime_value = Gtk.Label('00:00:00') + + vbox_labels.pack_start(self.baseinfo_label, True, True, 0) + vbox_labels.pack_start(self.speakerinfo_label, True, True, 0) + vbox_labels.pack_start(self.sessioninfo_label, True, True, 0) + vbox_labels.pack_start(self.organisationinfo_label, True, True, 0) + vbox_entries.pack_start(self.baseinfo_entry_label, True, True, 0) + vbox_entries.pack_start(self.speakerinfo_entry, True, True, 0) + vbox_entries.pack_start(self.sessioninfo_entry, True, True, 0) + vbox_entries.pack_start(self.organisationinfo_entry, True, True, 0) + vbox_streaminfo.pack_start(self.stream_button, False, True, 15) + hbox_time.pack_start(self.streamtime_label, False, False, 0) + hbox_time.pack_start(self.streamtime_value, False, False, 0) + vbox_streaminfo.pack_start(hbox_time, False, True, 0) + hbox.pack_start(vbox_labels, False, False, 0) + hbox.pack_start(vbox_entries, False, False, 0) + hbox.pack_start(vbox_streaminfo, False, False, 0) vbox.pack_start(self.videowidget, True, True, 0) vbox.pack_start(hbox, False, True, 0) self.win.add(vbox) @@ -88,241 +124,12 @@ class Streamgui(object): self.win.show_all() self.xid = self.videowidget.get_property('window').get_xid() - self.connectsignals() - - def connectsignals(self): - """Connects signals with the methods""" - bus = self.pipel.get_bus() - bus.add_signal_watch() - bus.enable_sync_message_emission() - # Used to get messages that GStreamer emits. 
- bus.connect("message", self.on_message) - # Used for connecting video to your application. - bus.connect("sync-message::element", self.on_sync_message) - # Connect the rtpjpegdepay signal - self.videosrc.connect("pad-added", self.on_pad_added_to_rtspsrc) - - - def on_pad_added_to_rtspsrc(self, rtspsrc, pad): - sinkpad = self.queuev_1.get_static_pad('sink') - pad.link(sinkpad) - print('[DEBUG] rtspsrc LINKED') - - def on_pad_added_to_jpegdec(self, jpegdec, pad): - screen_sinkpad = self.screensink.get_static_pad('sink') - pad.link(screen_sinkpad) - print('[DEBUG] decodebin LINKED') - - def connect_tee(self, tee_element, input_element, output_element_1, output_element_2): - """Links input and outputs of a given Gst tee element.""" - # Find a way to check if the element given are in the pipeline - # then pass the result to the 'if' statement. -## argcheck = [True for arg in locals() if arg in 'the_list_of_elements_added'] -## print('[DEBUG] ArgList check: ', argcheck) -## if False not in argcheck - if True: - input_element.link(tee_element) - tee_element.link(output_element_1) - tee_element.link(output_element_2) - print('[DEBUG] Elements linked to tee') - else: - print('[ERROR] Couldn\'t link the tee. Element(s) probably not in the pipeline ') - - def constructpipeline (self): - """Add and link elements in a GStreamer pipeline""" - # Create the pipelines instance. - self.streampipe = Gst.Pipeline() - - # Define pipeline elements. 
- self.videosrc = Gst.ElementFactory.make('rtspsrc', 'videosrc') - self.videosrc.set_property('location', 'rtsp://192.168.48.2:554') - self.videosrc.set_property('latency', 100) - - self.decodebin = Gst.ElementFactory.make('decodebin', 'decodebin') - -## Video source for testing purpose: -## self.videosrc = Gst.ElementFactory.make('videotestsrc', 'videosrc') - self.rtpjpegdepay = Gst.ElementFactory.make('rtpjpegdepay', 'rtpjpegdepay') - self.jpegdec = Gst.ElementFactory.make('jpegdec', 'jpegdec') - self.jpegdec.set_property('max-errors', -1) - self.mkvmux = Gst.ElementFactory.make('matroskamux', 'mkvmux') - self.tee_rawvideo = Gst.ElementFactory.make('tee', 'tee_rawvideo') - self.queuev_1 = Gst.ElementFactory.make('queue', 'queuev_1') - self.queuev_2 = Gst.ElementFactory.make('queue', 'queuev_2') - self.queuev_3 = Gst.ElementFactory.make('queue', 'queuev_3') - self.queuev_4 = Gst.ElementFactory.make('queue', 'queuev_4') - - self.disksink_rawvideo = Gst.ElementFactory.make('filesink') -#[TO DO]: File location has to be defined - self.disksink_rawvideo.set_property('location', 'popo_rawvideo') - self.screensink = Gst.ElementFactory.make('xvimagesink', 'screensink') -# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -# IMPORTANT: -# for 'webmmux' element streamable=True MUST be set! -# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - # Elements to test: - #--------------------------------------------------------------------------- - self.audiosrc = Gst.ElementFactory.make('pulsesrc', 'audiosrc') - self.vorbisenc = Gst.ElementFactory.make('vorbisenc', 'vorbisenc') - self.oggmux = Gst.ElementFactory.make('oggmux', 'oggmux') - self.scaling = Gst.ElementFactory.make('videoscale', 'scaling') -## scaling_caps = Gst.caps_from_string('video/x-raw, width=640, height=360') -## self.scaling.set_property('width', 640) -## self.scaling.set_property('height', 360) -## self.scalefilter = Gst.ElementFactory.make('capsfilter') -## self.scalefilter.set_property('caps', scaling_caps) - self.vp8enc = Gst.ElementFactory.make('vp8enc', 'vp8enc') - self.vp8enc.set_property('min_quantizer', 1) - self.vp8enc.set_property('max_quantizer', 13) - self.vp8enc.set_property('cpu-used', 5) - self.vp8enc.set_property('deadline', 42000) - self.vp8enc.set_property('threads', 2) - self.vp8enc.set_property('sharpness', 7) - self.webmmux = Gst.ElementFactory.make('webmmux', 'webmmux') - self.webmmux.set_property('streamable', True) + self.pipel = gstconf.New_user_pipeline() - self.tee_videodecoded = Gst.ElementFactory.make('tee', 'tee_videodecoded') - self.tee_streamfull = Gst.ElementFactory.make('tee', 'tee_streamfull') - self.tee_streamaudio = Gst.ElementFactory.make('tee', 'tee_streamaudio') - self.queuea_1 = Gst.ElementFactory.make('queue', 'queuea_1') - self.queuea_2 = Gst.ElementFactory.make('queue', 'queuea_2') - self.queuea_3 = Gst.ElementFactory.make('queue', 'queuea_3') - self.queuea_4 = Gst.ElementFactory.make('queue', 'queuea_4') - self.queuev_5 = Gst.ElementFactory.make('queue', 'queuev_5') - self.queuev_6 = Gst.ElementFactory.make('queue', 'queuev_6') - self.queuev_7 = Gst.ElementFactory.make('queue', 'queuev_7') - self.queuem_1 = Gst.ElementFactory.make('queue', 'queuem_1') - self.queuem_2 = Gst.ElementFactory.make('queue', 'queuem_2') - - self.disksink_audio = Gst.ElementFactory.make('filesink') - 
self.disksink_audio.set_property('location', 'popo_audio') - self.disksink_stream = Gst.ElementFactory.make('filesink') - self.disksink_stream.set_property('location', 'popo_stream') - - self.icecastsink_audio = Gst.ElementFactory.make('shout2send', 'icecastsink_audio') - # Configuration should be written on a file locally to keep safe private addresses - self.icecastsink_audio.set_property('ip', 'live2.fsf.org') - self.icecastsink_audio.set_property('port', 80) - self.icecastsink_audio.set_property('mount', 'testaudio.ogv') - self.icecastsink_audio.set_property('password', 'thahw3Wiez') - self.icecastsink_stream = Gst.ElementFactory.make('shout2send', 'icecastsink_stream') - self.icecastsink_stream.set_property('ip', 'live2.fsf.org') - self.icecastsink_stream.set_property('port', 80) - self.icecastsink_stream.set_property('mount', 'teststream.ogv') - self.icecastsink_stream.set_property('password', 'thahw3Wiez') - #--------------------------------------------------------------------------- - - # Add the elements to the pipeline. 
- # + Raw video and screen monitoring feed: - self.streampipe.add(self.videosrc) - self.streampipe.add(self.queuev_1) - self.streampipe.add(self.rtpjpegdepay) - self.streampipe.add(self.queuev_2) - self.streampipe.add(self.jpegdec) - self.streampipe.add(self.tee_rawvideo) - self.streampipe.add(self.queuev_3) - self.streampipe.add(self.mkvmux) - self.streampipe.add(self.queuev_4) -## self.streampipe.add(self.tee_videodecoded) - self.streampipe.add(self.disksink_rawvideo) - self.streampipe.add(self.screensink) - # + Audio feed: - self.streampipe.add(self.audiosrc) - self.streampipe.add(self.queuea_1) - self.streampipe.add(self.vorbisenc) - self.streampipe.add(self.tee_streamaudio) - self.streampipe.add(self.queuea_2) - self.streampipe.add(self.oggmux) - self.streampipe.add(self.queuea_3) - self.streampipe.add(self.disksink_audio) - self.streampipe.add(self.queuea_4) - self.streampipe.add(self.icecastsink_audio) - # + Stream (audio and video) feed: - """ - self.streampipe.add(self.queuev_6) - self.streampipe.add(self.scaling) - self.streampipe.add(self.scalefilter) - self.streampipe.add(self.vp8enc) - self.streampipe.add(self.queuev_5) - self.streampipe.add(self.webmmux) - self.streampipe.add(self.tee_streamfull) - self.streampipe.add(self.queuev_7) - self.streampipe.add(self.queuem_1) - self.streampipe.add(self.queuem_2) - self.streampipe.add(self.disksink_stream) -## self.streampipe.add(self.icecastsink_stream) - """ - - - # Link the elements in the pipeline. 
- # + Raw video and screen monitoring feed: - self.queuev_1.link(self.rtpjpegdepay) -## self.rtpjpegdepay.link(self.tee_rawvideo) -## self.tee_rawvideo.link(self.queuev_2) -## self.tee_rawvideo.link(self.jpegdec) - self.connect_tee(self.tee_rawvideo, - self.rtpjpegdepay, - self.queuev_2, - self.jpegdec,) - self.queuev_2.link(self.mkvmux) - self.mkvmux.link(self.queuev_4) - self.queuev_4.link(self.disksink_rawvideo) - """ - self.connect_tee(self.tee_videodecoded, - self.jpegdec, - self.queuev_3, - self.queuev_6,) - """ - self.jpegdec.link(self.queuev_3) - self.queuev_3.link(self.screensink) - # + Audio feed: - self.audiosrc.link(self.queuea_1) - self.queuea_1.link(self.vorbisenc) - self.vorbisenc.link(self.queuea_2) - self.vorbisenc.link(self.queuea_2) - self.queuea_2.link(self.oggmux) - self.connect_tee(self.tee_streamaudio, - self.oggmux, - self.queuea_3, - self.queuea_4,) - self.queuea_3.link(self.disksink_audio) - self.queuea_4.link(self.icecastsink_audio) - # + Linking Stream (audio and video) feed: -## self.queuev_6.link(self.scalefilter) -## self.scalefilter.link(self.scaling) -## self.scaling.link(self.vp8enc) - """ - self.queuev_6.link(self.vp8enc) - self.vp8enc.link(self.queuev_7) - self.queuev_7.link(self.webmmux) - self.webmmux.link(self.queuem_1) -## self.queuea_4.link(self.webmmux) - self.connect_tee(self.tee_streamfull, - self.webmmux, - self.queuem_1, - self.queuem_2,) + bus = gstconf.get_gstreamer_bus() + bus.connect("sync-message::element", self.on_sync_message) -## self.webmmux.link(self.queuem_1) -## self.webmmux.link(self.queuem_2) - self.queuem_1.link(self.disksink_stream) -## self.queuem_2.link(self.icecastsink_stream) - """ - return self.streampipe - - def on_message(self, bus, message): - - t = message.type - if t == Gst.MessageType.EOS: - self.pipel.set_state(Gst.State.NULL) - self.stream_button.set_label('Stream') - elif t == Gst.MessageType.ERROR: - err, debug = message.parse_error() - print ("Error: %s" % err, debug) - 
self.pipel.set_state(Gst.State.NULL) - self.stream_button.set_label('Stream') def on_sync_message(self, bus, message): @@ -331,23 +138,25 @@ class Streamgui(object): imagesink.set_window_handle(self.videowidget.get_property('window').get_xid()) - # USE THAT FUNCTION TO GET THE SOURCE CHOICE (ELPHEL OR WEBCAM) - def on_file_selected(self, widget): - - self.multimedia_file = self.load_file.get_filename() - def on_stream_clicked(self, widget): labelname = self.stream_button.get_label() - if labelname == 'Stream': - self.pipel.set_state(Gst.State.PLAYING) + if labelname == 'Stream': + self.pipel.stream_play() +## self.pipel.get_stream_state() self.stream_button.set_label('ON AIR') + start_time = time() elif labelname == 'ON AIR': - self.pipel.set_state(Gst.State.NULL) + self.pipel.stream_stop() self.stream_button.set_label('Stream') - +## Use threads to refresh the time elapsed since the beginning of the stream?? + def time_elapsed(self, widget): + if self.pipel.stream_get_state() == 'PLAYING': + pass + + if __name__ == "__main__": Gst.init() Streamgui() - Gtk.main() \ No newline at end of file + Gtk.main() -- 2.25.1