Testing pipeline completed.
[libre-streamer.git] / stream_2016 / libre-streamer.py
index 86dadc22abb380bc99236c851ec7e704c30a0115..6fae3db68b726015f410cf048fd3d1c7f060afa9 100755 (executable)
@@ -1,42 +1,47 @@
 #!/usr/bin/env python3.4
+# -*- coding: utf-8 -*-
 
-# This file is part of Libre-Streamer.
+# This file is part of ABYSS.
+# ABYSS Broadcast Your Streaming Successfully 
 #
-# Libre-Streamer is free software: you can redistribute it and/or modify
+# ABYSS is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# Libre-Streamer is distributed in the hope that it will be useful,
+# ABYSS is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with Libre-Streamer.  If not, see <http://www.gnu.org/licenses/>.
+# along with ABYSS.  If not, see <http://www.gnu.org/licenses/>.
 #
 # Copyright (c) 2016 David Testé
 
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 # TODO list:
 # ----------
-#    - Add a form to fill before start streaming (conf title, name, etc...)
+#    - Implement a method to switch to the webcam feed if the Elphel cam feed is lost
+#         --> Ping by opening a Telnet connection every 2 seconds (if it fails, switch to webcam)
+#             (a rough sketch of this check follows the TODO list)
+#         --> Has to be threaded
 #    - Add a checkbox to enable/disable options (storing/streaming - storing only - stream only - etc...)
 #    - Add a function to get the ip address of the camera automatically (see github.com/paulmilliken)
-#    - Create a module for the pipeline construction section to clarify the code
-#    - Implement 2016 edition pipeline, see file 'gstream_pipeline_by_quidam'
 #    - Create a module for the network configuration (fan/cpu, ifconfig, stream server,etc)
+#         --> Taken care of in the FAI build
 #    - Generate a log file at runtime (e.g. to record whether the network configuration
 #      and the pipeline construction went well or not)
 #    - Add an input source choice for the user (camera on IP or webcam)
-#    - Add a VU-meter to check if audio feed is emitting signal
+#    - Add a time counter
+#         --> Has to be threaded
+#    - Add a 'CPU load' widget 
 #    - Add the FSF logo (need to do some pixel art) as an application icon
 #    - Add the FSF logo inside the streamer use the 'textoverlay' method in ElementFactory.make()
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
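+# A possible sketch for the webcam fallback check mentioned in the TODO list above
+# (assumptions: the Elphel camera answers Telnet on port 23; 'switch_to_webcam' is
+# an illustrative callback name, not something defined in this file):
+#
+#     import telnetlib
+#     import threading
+#
+#     def watch_camera(ip, switch_to_webcam, interval=2.0):
+#         """Poll the camera every 'interval' seconds; switch on the first failure."""
+#         def poll():
+#             try:
+#                 telnetlib.Telnet(ip, 23, timeout=interval).close()
+#             except OSError:
+#                 switch_to_webcam()
+#                 return          # stop polling once we have switched over
+#             threading.Timer(interval, poll).start()
+#         poll()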
 
 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 # INFO: run the following command in a terminal before launching libre-streamer to get an error log.
-# GST_DEBUG=3,python:5,gnl*:5 ./libre-streamer.py | tee -a log 2>&1
+# GST_DEBUG=4,python:5,gnl*:5 ./libre-streamer.py | tee -a log 2>&1
 # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 __author__ = 'David Testé'
@@ -48,13 +53,37 @@ __status__ = 'Prototype'
 
 
 import sys
+from time import time, localtime, strftime
 
 import gi
 gi.require_version('Gtk', '3.0')
 from gi.repository import Gtk
+from gi.repository import Gdk
+gi.require_version('Gst', '1.0')
 from gi.repository import Gst
 from gi.repository import GdkX11
 from gi.repository import GstVideo
+from gi.repository import GObject
+
+import gstconf
+
+# Based on 2016 FSF's ELPHEL camera configuration
+PORT = ':554'
+IP_1 = '192.168.48.2'
+IP_2 = '192.168.48.3'
+IP_3 = '192.168.48.4'
+CAM1_IP1 = 'CAM_1: ' + IP_1
+CAM2_IP2 = 'CAM_2: ' + IP_2
+CAM3_IP3 = 'CAM_3: ' + IP_3
+rtsp_address = None
+ENTRYFIELD_TEXT = 'Please fill in both entry fields to stop streaming.'
+CAMCHOICE_TEXT = 'Please choose a camera address.'
+TESTMODE_TEXT = 'Quit testing mode to switch to streaming mode.'
+formatted_date = strftime('%Y_%m_%d', localtime())
+metadata = {'speaker_name':'NC',
+            'session_title':'NC',
+            'organisation':'NC',}
+start_time = 0
 
 
 class Streamgui(object):
@@ -62,191 +91,130 @@ class Streamgui(object):
 
     def __init__(self):
 
-        self.multimedia_file=""
-        # Create the global pipeline (might wanna use a general bin instead)
-        self.pipel = self.constructpipeline()
+        # Initialize a pipeline
+        self.pipel = None
+
         # Create the GUI
         self.win = Gtk.Window()
-        self.win.set_title("Libre-Streamer")
+        self.win.set_title("ABYSS")
         self.win.connect("delete_event",
                          lambda w,e: Gtk.main_quit())
+##        self.win.fullscreen()
         vbox = Gtk.VBox(False, 0)
-        hbox = Gtk.HBox(False, 0)
-        self.load_file = Gtk.FileChooserButton("Choose Audio File")
-        self.stream_button = Gtk.Button("Stream")
+        vbox_labels = Gtk.VBox(False, 0)
+        vbox_entries = Gtk.VBox(False, 0)
+        vbox_streaminfo = Gtk.VBox(True, 0)
+        vbox_tbuttongrp = Gtk.VBox(False, 0)
+        hbox = Gtk.HBox(False, 30)
+        hbox_videoaudio = Gtk.HBox(False, 0)
+        hbox_time = Gtk.HBox(False, 0)
+        hbox_cpu = Gtk.HBox(False, 0)
+        
         self.videowidget = Gtk.DrawingArea()
-        self.videowidget.set_size_request(600, 400)
-        self.load_file.connect("selection-changed", self.on_file_selected)
-        self.stream_button.connect("clicked", self.on_stream_clicked)
-
-        hbox.pack_start(self.stream_button, False, True, 0)
-        vbox.pack_start(self.load_file, False, True, 0)
-        vbox.pack_start(self.videowidget, True, True, 0)
+        self.videowidget.set_size_request(800, 600)
+
+
+        # True stereo feed has to be implemented:
+        self.vumeter_l = Gtk.ProgressBar()
+        self.vumeter_l.set_orientation(Gtk.Orientation.VERTICAL)
+        self.vumeter_l.set_inverted(True)
+        self.vumeter_r = Gtk.ProgressBar()
+        self.vumeter_r.set_orientation(Gtk.Orientation.VERTICAL)
+        self.vumeter_r.set_inverted(True)
+## Use CSS to modify the color of ProgressBar
+##        color = Gdk.RGBA()
+##        Gdk.RGBA.parse(color, 'rgb(240,0,150)')
+##        print ("Color: ", color)
+##        self.vumeter.override_background_color(Gtk.StateFlags.NORMAL, color)
+##        self.vumeter.override_symbolic_color('bg_color', color)
+##        self.vumeter.override_symbolic_color('theme_bg_color', color)
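+## One possible approach is a Gtk.CssProvider (sketch; the 'progressbar progress'
+## selector assumes GTK+ >= 3.20, older releases use the 'GtkProgressBar' node):
+##        css = b'progressbar progress { background-color: rgb(240,0,150); }'
+##        provider = Gtk.CssProvider()
+##        provider.load_from_data(css)
+##        Gtk.StyleContext.add_provider_for_screen(
+##            Gdk.Screen.get_default(), provider,
+##            Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)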
+        
+        self.baseinfo_label = Gtk.Label('Base info: ')
+        self.baseinfo_entry_label = Gtk.Label('LP_' + formatted_date)
+        self.speakerinfo_label = Gtk.Label('Speaker name: ')
+        self.speakerinfo_entry = Gtk.Entry()
+        self.sessioninfo_label = Gtk.Label('Session name: ')
+        self.sessioninfo_entry = Gtk.Entry()
+
+        self.stream_button = Gtk.Button('Stream')
+        self.stream_button.connect('clicked', self.on_stream_clicked)
+        self.streamtime_label = Gtk.Label('Time elapsed ')
+        self.streamtime_value = Gtk.Label('00:00:00')
+        self.test_button = Gtk.Button('Set-up test')
+        self.test_button.connect('clicked', self.on_test_clicked)
+
+        self.cpuload_label = Gtk.Label('CPU load: ')
+        self.cpuload_value = Gtk.Label('NC')
+
+        self.cam1_tbutton = Gtk.ToggleButton(label=CAM1_IP1)
+        self.cam1_tbutton.connect('toggled', self.on_tbutton_toggled, 'cam1')
+        self.cam2_tbutton = Gtk.ToggleButton(label=CAM2_IP2)
+        self.cam2_tbutton.connect('toggled', self.on_tbutton_toggled, 'cam2')
+        self.cam3_tbutton = Gtk.ToggleButton(label=CAM3_IP3)
+        self.cam3_tbutton.connect('toggled', self.on_tbutton_toggled, 'cam3')
+
+        self.entryfield_info = Gtk.MessageDialog(buttons=Gtk.ButtonsType.CLOSE,
+                                                 text=ENTRYFIELD_TEXT,)
+                                                 ##messagetype=Gtk.MessageType.WARNING,
+                                                 ##Gtk.MessageType.INFO,)
+        self.camchoice_info = Gtk.MessageDialog(buttons=Gtk.ButtonsType.CLOSE,
+                                                text=CAMCHOICE_TEXT,)
+        self.testmode_info = Gtk.MessageDialog(buttons=Gtk.ButtonsType.CLOSE,
+                                               text=TESTMODE_TEXT,)
+        
+        hbox_videoaudio.pack_start(self.videowidget, True, True, 0)
+        hbox_videoaudio.pack_start(self.vumeter_l, False, False, 3)
+        hbox_videoaudio.pack_start(self.vumeter_r, False, False, 3)
+        vbox_labels.pack_start(self.baseinfo_label, True, True, 0)
+        vbox_labels.pack_start(self.speakerinfo_label, True, True, 0)
+        vbox_labels.pack_start(self.sessioninfo_label, True, True, 0)
+        vbox_entries.pack_start(self.baseinfo_entry_label, True, True, 0)
+        vbox_entries.pack_start(self.speakerinfo_entry, True, True, 0)
+        vbox_entries.pack_start(self.sessioninfo_entry, True, True, 0)
+        hbox_time.pack_start(self.streamtime_label, False, False, 0)
+        hbox_time.pack_start(self.streamtime_value, False, False, 0)
+        hbox_cpu.pack_start(self.cpuload_label, False, False, 0)
+        hbox_cpu.pack_start(self.cpuload_value, False, False, 0)
+        vbox_streaminfo.pack_start(hbox_time, False, True, 0)
+        vbox_streaminfo.pack_start(hbox_cpu, False, True, 0)
+        vbox_tbuttongrp.pack_start(self.cam1_tbutton, False, False, 0)
+        vbox_tbuttongrp.pack_start(self.cam2_tbutton, False, False, 0)
+        vbox_tbuttongrp.pack_start(self.cam3_tbutton, False, False, 0)
+        hbox.pack_start(vbox_labels, False, False, 0)
+        hbox.pack_start(vbox_entries, False, False, 0)
+        hbox.pack_start(vbox_tbuttongrp, False, False, 0)
+        hbox.pack_start(self.test_button, False, False, 0)
+        hbox.pack_start(self.stream_button, False, False, 0)
+        hbox.pack_start(vbox_streaminfo, False, False, 0)
+        vbox.pack_start(hbox_videoaudio, True, True, 0)
         vbox.pack_start(hbox, False, True, 0)
+
         self.win.add(vbox)
         self.win.set_position(Gtk.WindowPosition.CENTER)
         self.win.show_all()
 
         self.xid = self.videowidget.get_property('window').get_xid()
-        self.connectsignals()
-
-    def connectsignals(self):
-        """Connects signals with the methods"""
-        bus = self.pipel.get_bus()
-        bus.add_signal_watch()
-        bus.enable_sync_message_emission()
-        # Used to get messages that GStreamer emits.
-        bus.connect("message", self.on_message)
-        # Used for connecting video to your application.
-        bus.connect("sync-message::element", self.on_sync_message)
-        # Connect the rtpdepay signal
-        self.videosrc.connect("pad-added", self.on_pad_added_to_rtspsrc)
-        if self.decodebin:
-            self.decodebin.connect("pad-added", self.on_pad_added_to_decodebin)
-##        elif self.jpegdec:
-##            self.jpegdec.connect("pad-added", self.on_pad_added_to_jpegdec)
-            
-    def on_pad_added_to_rtspsrc(self, rtspsrc, pad):
-##        if self.decodebin:
-##            sinkpad = self.decodebin.get_static_pad('sink')
-##        elif self.rtpjpegdepay:
-##            sinkpad = self.rtpjpegdepay.get_static_pad('sink')
-        sinkpad = self.queuev_1.get_static_pad('sink')
-        pad.link(sinkpad)
-        print('[DEBUG] rtspsrc LINKED')
-
-    def on_pad_added_to_decodebin(self, decodebin, pad):
-        screen_sinkpad = self.screensink.get_static_pad('sink')
-        pad.link(screen_sinkpad)
-        print('[DEBUG] decodebin LINKED')
-
-    def on_pad_added_to_jpegdec(self, jpegdec, pad):
-        screen_sinkpad = self.screensink.get_static_pad('sink')
-        pad.link(screen_sinkpad)
-        print('[DEBUG] decodebin LINKED')
-        
-    def constructpipeline (self):
-        """Add and link elements in a GStreamer pipeline"""
-        # Create the pipelines instance.
-        self.streampipe = Gst.Pipeline()
-
-        # Define pipeline elements.
-        self.videosrc = Gst.ElementFactory.make('rtspsrc', 'videosrc')
-        self.videosrc.set_property('location', 'rtsp://192.168.48.2:554')
-        self.videosrc.set_property('latency', 100)
-
-        self.decodebin = Gst.ElementFactory.make('decodebin', 'decodebin')
-        
-## Video source for testing purpose:        
-##        self.videosrc = Gst.ElementFactory.make('videotestsrc', 'videosrc')
-        self.rtpjpegdepay = Gst.ElementFactory.make('rtpjpegdepay', 'rtpjpegdepay')
-        self.jpegdec = Gst.ElementFactory.make('jpegdec', 'jpegdec')
-        self.jpegdec.set_property('max-errors', -1)
-        self.mkvmux = Gst.ElementFactory.make('matroskamux', 'mkvmux')
-        self.tee_rawvideo = Gst.ElementFactory.make('tee', 'tee_rawvideo')
-        self.queuev_1 = Gst.ElementFactory.make('queue', 'queuev_1')
-        self.queuev_2 = Gst.ElementFactory.make('queue', 'queuev_2')
-        self.queuev_3 = Gst.ElementFactory.make('queue', 'queuev_3')
-        self.queuev_4 = Gst.ElementFactory.make('queue', 'queuev_4')
-
-        self.disksink_rawvideo = Gst.ElementFactory.make('filesink')
-#[TO DO]: File location has to be defined 
-        self.disksink_rawvideo.set_property('location', 'popo_rawvideo')
-        self.screensink = Gst.ElementFactory.make('xvimagesink', 'screensink')
-        
-# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-# IMPORTANT:
-# for 'webmmux' element streamable=True MUST be set!
-# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
-        # Elements to test:
-        #---------------------------------------------------------------------------
-        self.audiosrc = Gst.ElementFactory.make('pulsesrc', 'audiosrc')
-        self.vorbisenc = Gst.ElementFactory.make('vorbisenc', 'vorbisenc')
-##        scaling_caps = Gst.Caps('video/x-raw, width=640, height=360')
-        self.scaling = Gst.ElementFactory.make('videoscale', 'scaling')
-        self.vp8enc = Gst.ElementFactory.make('vp8enc', 'vp8enc')
-        self.vp8enc.set_property('min_quantizer', 1)
-        self.vp8enc.set_property('max_quantizer', 13)
-        self.vp8enc.set_property('cpu-used', 5)
-        self.vp8enc.set_property('deadline', 42000)
-        self.vp8enc.set_property('threads', 2)
-        self.vp8enc.set_property('sharpness', 7)
-        self.webmmux = Gst.ElementFactory.make('webmmux', 'webmmux')
-        self.webmmux.set_property('streamable', True)
-        
-        self.tee_streamvideo = Gst.ElementFactory.make('tee', 'tee_streamvideo')
-        self.tee_streamaudio = Gst.ElementFactory.make('tee', 'tee_streamaudio')
-        self.queuea_1 = Gst.ElementFactory.make('queue', 'queuea_1')
-        self.queuea_2 = Gst.ElementFactory.make('queue', 'queuea_2')
-        
-        self.disksink_audio = Gst.ElementFactory.make('filesink')
-        self.disksink_audio.set_property('location', 'popo_audio')
-        self.disksink_stream = Gst.ElementFactory.make('filesink')
-        self.disksink_stream.set_property('location', 'popo_stream')
-
-        self.icecastsink_audio = Gst.ElementFactory.make('shout2send', 'icecastsink_audio')
-        # Configuration should be written on a file locally to keep safe private addresses
-        self.icecastsink_audio.set_property('ip', 'live2.fsf.org')
-        self.icecastsink_audio.set_property('port', 80)
-        self.icecastsink_audio.set_property('mount', 'testaudio.ogv')
-        self.icecastsink_audio.set_property('password', '')
-        self.icecastsink_stream = Gst.ElementFactory.make('shout2send', 'icecastsink_stream')
-        self.icecastsink_stream.set_property('ip', 'live2.fsf.org')
-        self.icecastsink_stream.set_property('port', 80)
-        self.icecastsink_stream.set_property('mount', 'teststream.ogv')
-        self.icecastsink_stream.set_property('password', '')
-        #---------------------------------------------------------------------------
-        
-        # Add the elements to the pipeline.
-        # Test of the first two lines of quidam's pipeline:
-        self.streampipe.add(self.videosrc)
-##        self.streampipe.add(self.decodebin)
-        self.streampipe.add(self.queuev_1)
-        self.streampipe.add(self.rtpjpegdepay)
-        self.streampipe.add(self.queuev_2)
-        self.streampipe.add(self.jpegdec)
-        self.streampipe.add(self.tee_rawvideo)
-        self.streampipe.add(self.queuev_3)
-        self.streampipe.add(self.mkvmux)
-        self.streampipe.add(self.queuev_4)
-        self.streampipe.add(self.disksink_rawvideo)
-        self.streampipe.add(self.screensink)
-
-        
-        # Link the elements in the pipeline.
-##        self.videosrc.link(self.decodebin)
-        self.queuev_1.link(self.rtpjpegdepay)
-##        self.rtpjpegdepay.link(self.queuev_2)
-##        self.rtpjpegdepay.link(self.jpegdec)
-        self.rtpjpegdepay.link(self.tee_rawvideo)
-##        self.queuev_2.link(self.jpegdec)
-##        self.jpegdec.link(self.tee_rawvideo)
-##        self.jpegdec.link(self.queuev_3)
-        self.tee_rawvideo.link(self.queuev_2)
-        self.tee_rawvideo.link(self.jpegdec)
-##        self.tee_rawvideo.link(self.queuev_3)
-        self.queuev_2.link(self.mkvmux)
-        self.mkvmux.link(self.queuev_4)
-        self.queuev_4.link(self.disksink_rawvideo)
-##        self.decodebin.link(self.screensink)
-##        self.queuev_3.link(self.disksink_rawvideo)
-        self.jpegdec.link(self.queuev_3)
-        self.queuev_3.link(self.screensink)
-        
-        return self.streampipe
-
-    def on_message(self, bus, message):
 
-        t = message.type
-        if t == Gst.MessageType.EOS:
-            self.pipel.set_state(Gst.State.NULL)
-            self.stream_button.set_label('Stream')
-        elif t == Gst.MessageType.ERROR:
-            err, debug = message.parse_error()
-            print ("Error: %s" % err, debug)
-            self.pipel.set_state(Gst.State.NULL)
-            self.stream_button.set_label('Stream')
+    def create_pipeline_instance(self, feed='main'):
+        """Creates pipeline instance and attaches it to GUI."""
+        if rtsp_address:
+            self.pipel = gstconf.New_user_pipeline(rtsp_address, feed,) 
+            bus = gstconf.get_gstreamer_bus()
+            bus.connect('sync-message::element', self.on_sync_message)
+            bus.connect('message', self.on_message)
+            return True
+        else:
+            self.camchoice_info.run()
+            self.camchoice_info.hide()
+            return False
+
+    def create_backup_pipeline(self):
+        labelname = self.stream_button.get_label()
+        if labelname == 'ON AIR':
+            self.create_pipeline_instance(feed='backup')
+            self.pipel.stream_play()
 
     def on_sync_message(self, bus, message):
 
@@ -254,25 +222,139 @@ class Streamgui(object):
             imagesink = message.src
             imagesink.set_property('force-aspect-ratio', True)
             imagesink.set_window_handle(self.videowidget.get_property('window').get_xid())
-            
-
-    # USE THAT FUNCTION TO GET THE SOURCE CHOICE (ELPHEL OR WEBCAM)        
-    def on_file_selected(self, widget):
-
-        self.multimedia_file = self.load_file.get_filename()
 
+    def on_message(self, bus, message):
+        # Getting the RMS audio level value:
+        s = Gst.Message.get_structure(message)
+        if message.type == Gst.MessageType.ELEMENT:
+            if str(Gst.Structure.get_name(s)) == 'level':
+                pct = self.iec_scale(s.get_value('rms')[0])
+                ##print('Level value: ', pct, '%') # [DEBUG]
+                self.vumeter_l.set_fraction(pct)
+                self.vumeter_r.set_fraction(pct)
+        # Watching for feed loss during streaming:
+        t = message.type
+        if t == Gst.MessageType.ERROR:
+            err, debug = message.parse_error()
+            if '(651)' not in debug:
+                # Any error other than a socket error (651): stop the stream,
+                # rebuild the output filename with the stream-failed flag, and
+                # start the backup pipeline.
+                self.pipel.stream_stop()
+                self.build_filename(streamfailed=True)
+                self.create_backup_pipeline()
+            
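+    # Note: the vumeter update above relies on the pipeline built in gstconf posting
+    # 'level' element messages, i.e. its audio branch must contain a GStreamer
+    # 'level' element. A minimal sketch of such an element (names are illustrative,
+    # assuming an 'audiosrc' element already exists in the pipeline):
+    #
+    #     level = Gst.ElementFactory.make('level', 'audiolevel')
+    #     level.set_property('message', True)                 # post 'level' messages
+    #     level.set_property('interval', 100 * Gst.MSECOND)   # every 100 ms
+    #     pipeline.add(level)
+    #     audiosrc.link(level)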
     def on_stream_clicked(self, widget):
-
-        labelname = self.stream_button.get_label()
-        if labelname == 'Stream':
-            self.pipel.set_state(Gst.State.PLAYING)
-            self.stream_button.set_label('ON AIR')
-        elif labelname == 'ON AIR':
-            self.pipel.set_state(Gst.State.NULL)
-            self.stream_button.set_label('Stream')
-
-
+        global start_time  # record the stream start in the module-level variable
+        labelname1 = self.stream_button.get_label()
+        labelname2 = self.test_button.get_label()
+        if labelname1 == 'Stream':
+            if labelname2 != 'Testing ...':
+                if self.create_pipeline_instance():
+                    self.clean_entry_fields()
+                    self.pipel.stream_play()
+                    self.stream_button.set_label('ON AIR')
+                    start_time = time()
+            else:
+                self.testmode_info.run()
+                self.testmode_info.hide()
+        elif labelname1 == 'ON AIR':
+            if self.build_filename():
+                self.pipel.stream_stop()
+                self.stream_button.set_label('Stream')
+
+    def on_test_clicked(self, widget):
+        labelname = self.test_button.get_label()
+        if labelname == 'Set-up test':
+            if self.create_pipeline_instance(feed='test'):
+                self.pipel.stream_play()
+                self.test_button.set_label('Testing ...')            
+        elif labelname == 'Testing ...':
+            self.pipel.stream_stop()
+            self.test_button.set_label('Set-up test')
+
+    def on_tbutton_toggled(self, tbutton, name):
+        global rtsp_address
+        running_cond = (self.stream_button.get_label() == 'ON AIR' or
+                        self.test_button.get_label() == 'Testing ...')
+        if running_cond:
+            tbutton.set_active(False)
+##            if tbutton.active():
+##                tbutton.set_active(True)
+            return
+            
+        if tbutton.get_active():
+            if name == 'cam1':
+                self.cam2_tbutton.set_active(False)
+                self.cam3_tbutton.set_active(False)
+                rtsp_address = IP_1 + PORT
+            elif name == 'cam2':
+                self.cam1_tbutton.set_active(False)
+                self.cam3_tbutton.set_active(False)
+                rtsp_address = IP_2 + PORT
+            elif name == 'cam3':
+                self.cam1_tbutton.set_active(False)
+                self.cam2_tbutton.set_active(False)
+                rtsp_address = IP_3 + PORT
+        
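+    # An alternative to the manual exclusion above would be a group of Gtk.RadioButton
+    # widgets drawn as toggle buttons (sketch only, not used here):
+    #
+    #     cam1 = Gtk.RadioButton.new_with_label_from_widget(None, CAM1_IP1)
+    #     cam2 = Gtk.RadioButton.new_with_label_from_widget(cam1, CAM2_IP2)
+    #     cam3 = Gtk.RadioButton.new_with_label_from_widget(cam1, CAM3_IP3)
+    #     for button in (cam1, cam2, cam3):
+    #         button.set_mode(False)   # draw as a plain button, not a radio indicator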
+    def build_filename(self, streamfailed=False):
+        """Get text in entries, check if empty and apply formatting if needed."""
+        sep = '_'
+        base = self.baseinfo_entry_label.get_text()
+        speaker = self.speakerinfo_entry.get_text()
+        speaker = sep.join(speaker.split())
+        session = self.sessioninfo_entry.get_text()
+        session = sep.join(session.split())
+        raw_filename = base + sep + speaker + sep + session
+        maxlen = 70
+        if speaker and session:
+            if len(raw_filename) >= maxlen:
+                # Keep the filename at most maxlen characters long.
+                raw_filename = raw_filename[:maxlen]
+                if streamfailed:
+                    self.pipel.set_filenames(raw_filename, streamfailed=True)
+                else:
+                    self.pipel.set_filenames(raw_filename,)
+##                    print('RAWFILENAME: ', raw_filename, ' <--')  # [DEBUG]
+            elif streamfailed:
+                self.pipel.set_filenames(raw_filename, streamfailed=True)
+            return True
+        elif not streamfailed:
+            self.entryfield_info.run()
+            self.entryfield_info.hide()
+            return False
+
+                
+    def clean_entry_fields(self):
+        self.speakerinfo_entry.set_text('')
+        self.sessioninfo_entry.set_text('')
+
+    def iec_scale(self, db):
+        """Returns the meter deflection percentage given a db value."""
+        pct = 0.0
+    
+        if db < -70.0:
+            pct = 0.0
+        elif db < -60.0:
+            pct = (db + 70.0) * 0.25
+        elif db < -50.0:
+            pct = (db + 60.0) * 0.5 + 2.5
+        elif db < -40.0:
+            pct = (db + 50.0) * 0.75 + 7.5
+        elif db < -30.0:
+            pct = (db + 40.0) * 1.5 + 15.0
+        elif db < -20.0:
+            pct = (db + 30.0) * 2.0 + 30.0
+        elif db < 0.0:
+            pct = (db + 20.0) * 2.5 + 50.0
+        else:
+            pct = 100.0
+        return pct / 100
+        
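+    # A few reference points for this IEC 268-18 style mapping, worked from the
+    # branches above: iec_scale(-60.0) -> 0.025, iec_scale(-20.0) -> 0.50,
+    # iec_scale(0.0) -> 1.0 (full scale).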
+    ## Use threading module to refresh the time elapsed since the beginning of the stream??
+    def time_elapsed(self, widget):
+        if self.pipel.stream_get_state() == 'PLAYING':
+            pass
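+    # One possible way to refresh the label from the GLib main loop instead of a
+    # dedicated thread (sketch; assumes start_time is set when streaming begins and
+    # that the method below is registered once with GObject.timeout_add):
+    #
+    #     GObject.timeout_add(1000, self.refresh_time_value)
+    #
+    #     def refresh_time_value(self):
+    #         if self.pipel and self.pipel.stream_get_state() == 'PLAYING':
+    #             elapsed = int(time() - start_time)
+    #             hours, rem = divmod(elapsed, 3600)
+    #             minutes, seconds = divmod(rem, 60)
+    #             self.streamtime_value.set_text('%02d:%02d:%02d'
+    #                                            % (hours, minutes, seconds))
+    #         return True   # keep the timeout running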
+            
+        
 if __name__ == "__main__":
     Gst.init()
     Streamgui()
-    Gtk.main()
\ No newline at end of file
+    Gtk.main()