GNOME Bugzilla – Bug 703436
eos unnormal
Last modified: 2013-07-03 08:31:28 UTC
I am in China and cannot send email from my normal mailbox, I am sorry. The script is:

#!/usr/bin/env python
# -*- encoding: utf-8 -*-

################ VIDEO RECEIVER

import gi
gi.require_version('Gst', '1.0')
import threading
from gi.repository import GObject, Gst
import time, socket, fcntl, struct, logging

Gst.init(None)


class Recorder:

    def __init__(self, callid):
        self.callid = callid

        # create pipeline and rtpbin
        self.pipeline = Gst.Pipeline()
        self.rtpbin = Gst.ElementFactory.make('rtpbin', 'rtpbin')
        self.rtpbin.set_property('latency', 400)

        # rtp VIDEO parts elements
        self.udpsrc_rtpin_video = Gst.ElementFactory.make('udpsrc', 'udpsrc0')
        self.udpsrc_caps_video = Gst.caps_from_string('application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264')
        self.udpsrc_rtpin_video.set_property('caps', self.udpsrc_caps_video)
        self.udpsrc_rtcpin_video = Gst.ElementFactory.make('udpsrc', 'udpsrc1')
        self.rtp_port_v = self.get_rtp_port_video()
        print self.rtp_port_v
        self.udpsrc_rtcpin_video.set_property('port', self.rtp_port_v + 1)
        self.udpsink_rtcpout_video = Gst.ElementFactory.make('udpsink', 'udpsink0')

        # rtp AUDIO parts elements
        self.udpsrc_rtpin_audio = Gst.ElementFactory.make('udpsrc', 'udpsrc2')
        self.udpsrc_caps_audio = Gst.caps_from_string('application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)PCMU')
        self.udpsrc_rtpin_audio.set_property('caps', self.udpsrc_caps_audio)
        self.udpsrc_rtcpin_audio = Gst.ElementFactory.make('udpsrc', 'udpsrc3')
        self.rtp_port_a = self.get_rtp_port_audio()
        self.udpsrc_rtcpin_audio.set_property('port', self.rtp_port_a + 1)
        self.udpsink_rtcpout_audio = Gst.ElementFactory.make('udpsink', 'udpsink1')

        # VIDEO dealing elements
        self.rtph264depay = Gst.ElementFactory.make('rtph264depay', 'rtpdepay')
        self.h264parse = Gst.ElementFactory.make('h264parse', 'h264parse')
        self.q1 = Gst.ElementFactory.make("queue", "q1")
        self.q2 = Gst.ElementFactory.make("queue", "q2")

        # AUDIO dealing elements
        self.pcmudepay = Gst.ElementFactory.make('rtppcmudepay', 'rtppcmudepay')
        self.mulawdec = Gst.ElementFactory.make('mulawdec', 'mulawdec')
        self.audioresample = Gst.ElementFactory.make('audioresample', 'audioresample')
        self.faac = Gst.ElementFactory.make('faac', 'faac')

        # mp4mux and filesink elements
        self.mp4mux = Gst.ElementFactory.make('mp4mux', 'mp4mux')
        self.filesink = Gst.ElementFactory.make('filesink', 'filesink')
        curr_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        filename = str(callid) + ' ' + curr_time + '.mp4'
        self.filesink.set_property('location', filename)

        # add the elements into the pipeline
        self.pipeline.add(self.rtpbin)
        self.pipeline.add(self.udpsrc_rtpin_video)
        self.pipeline.add(self.udpsrc_rtcpin_video)
        self.pipeline.add(self.udpsink_rtcpout_video)
        self.pipeline.add(self.udpsrc_rtpin_audio)
        self.pipeline.add(self.udpsrc_rtcpin_audio)
        self.pipeline.add(self.udpsink_rtcpout_audio)
        self.pipeline.add(self.pcmudepay)
        self.pipeline.add(self.mulawdec)
        self.pipeline.add(self.audioresample)
        self.pipeline.add(self.faac)
        self.pipeline.add(self.rtph264depay)
        self.pipeline.add(self.h264parse)
        self.pipeline.add(self.mp4mux)
        self.pipeline.add(self.filesink)
        self.pipeline.add(self.q1)
        self.pipeline.add(self.q2)

        # Receive the VIDEO RTP and RTCP streams
        self.udpsrc_rtpin_video.link_pads('src', self.rtpbin, 'recv_rtp_sink_0')
        self.udpsrc_rtcpin_video.link_pads('src', self.rtpbin, 'recv_rtcp_sink_0')

        # Receive the AUDIO RTP and RTCP streams
        self.udpsrc_rtpin_audio.link_pads('src', self.rtpbin, 'recv_rtp_sink_1')
        self.udpsrc_rtcpin_audio.link_pads('src', self.rtpbin, 'recv_rtcp_sink_1')

        # reply with RTCP stream
        self.rtpbin.link_pads('send_rtcp_src_0', self.udpsink_rtcpout_video, 'sink')
        self.rtpbin.link_pads('send_rtcp_src_1', self.udpsink_rtcpout_audio, 'sink')

        # Link them (rtpbin's recv_rtp_src pads only appear at runtime)
        self.rtpbin.connect('pad-added', self.rtpbin_pad_added)
        self.rtph264depay.link_pads('src', self.h264parse, 'sink')
        self.pcmudepay.link(self.mulawdec)
        self.mulawdec.link(self.audioresample)
        self.audioresample.link(self.faac)
        self.h264parse.link(self.q1)
        self.q1.link_pads('src', self.mp4mux, 'video_0')
        self.faac.link(self.q2)
        self.q2.link_pads('src', self.mp4mux, 'audio_0')
        self.mp4mux.link(self.filesink)

        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect('message::eos', self.on_eos)

    def eos(self):
        print "eos()"
        self.bus.add_signal_watch()
        print self.pipeline.send_event(Gst.Event.new_eos())

    def on_eos(self, bus, msg):
        print "on_eos()"
        bus.remove_signal_watch()
        self.pipeline.set_state(Gst.State.NULL)

    def rtpbin_pad_added(self, obj, pad):
        print "PAD ADDED"
        pad_name = pad.get_name()
        if pad_name[0:14] == 'recv_rtp_src_0':
            v_pad = self.rtph264depay.get_static_pad("sink")
            pad.link(v_pad)
            print "Video stream is coming"
        elif pad_name[0:14] == 'recv_rtp_src_1':
            a_pad = self.pcmudepay.get_static_pad("sink")
            pad.link(a_pad)
            print "audio stream is coming"

    def get_rtp_port_video(self):
        # let udpsrc pick a free port, read it back, then release it again
        self.udpsrc_rtpin_video.set_property('port', 0)
        self.udpsrc_rtpin_video.set_state(Gst.State.PAUSED)
        port_video = self.udpsrc_rtpin_video.get_property('port')
        self.udpsrc_rtpin_video.set_state(Gst.State.NULL)
        return port_video

    def get_rtp_port_audio(self):
        self.udpsrc_rtpin_audio.set_property('port', 0)
        self.udpsrc_rtpin_audio.set_state(Gst.State.PAUSED)
        port_audio = self.udpsrc_rtpin_audio.get_property('port')
        self.udpsrc_rtpin_audio.set_state(Gst.State.NULL)
        return port_audio

    def get_local_address(self):
        rtp_port_v = self.rtp_port_v
        rtp_port_a = self.rtp_port_a
        address = (get_ip_address('eth0'), rtp_port_a, rtp_port_a + 1, rtp_port_v, rtp_port_v + 1)
        return address

    def start_stream(self):
        self.log()
        self.pipeline.set_state(Gst.State.PLAYING)
        self.udpsink_rtcpout_audio.set_locked_state(Gst.State.PLAYING)
        self.udpsink_rtcpout_video.set_locked_state(Gst.State.PLAYING)
        print "Started..."
        print "Running..."

    def stop_stream(self):
        self.eos()
        time.sleep(10)
        print "stop"
        self.filesink.set_state(Gst.State.NULL)


def get_ip_address(ifname):
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack('256s', ifname[:15])
    )[20:24])


if __name__ == '__main__':
    R = Recorder(111)
    R.get_local_address()
    R.start_stream()
Some days ago I used GStreamer 1.0 to record an MP4 file with Python on Ubuntu Linux. The script receives audio and video RTP streams and pushes them into 'mp4mux'; at the end I send an 'eos' event to the pipeline and the bus eventually catches the 'message::eos'. It worked well and I got a correct MP4 file. Then I had to upgrade to GStreamer 1.1.1, and it no longer works properly: I still get a file, but the file is abnormal and cannot be played.
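What I want for the stop path is roughly the following (only a sketch, not the exact code I run; record_and_stop and the seconds argument are just for illustration): send the EOS, let a GLib main loop dispatch 'message::eos', and only set the pipeline to NULL after that, because mp4mux writes the MP4 headers once the EOS has passed through it.

from gi.repository import Gst, GLib   # Gst initialised as in the script above

def record_and_stop(recorder, seconds):
    # hypothetical helper, not part of the original script
    loop = GLib.MainLoop()

    def on_eos(bus, msg):
        recorder.pipeline.set_state(Gst.State.NULL)
        loop.quit()

    recorder.bus.connect('message::eos', on_eos)
    recorder.start_stream()
    # after 'seconds' seconds push EOS into the pipeline; returning False
    # removes the timeout source again
    GLib.timeout_add_seconds(
        seconds,
        lambda: recorder.pipeline.send_event(Gst.Event.new_eos()) and False)
    # the signal watch only delivers 'message::eos' while this loop is running
    loop.run()

With something like that, the fixed time.sleep(10) in stop_stream() would not be needed.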
Unrelated but you might want to use GIO (GSocket and friends) for the socket code :) What exactly is wrong with the resulting file when using 1.1.1? Are there any warnings printed on the terminal?
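Untested sketch of what I mean (make_udpsrc_with_gsocket is only an illustration): create and bind the UDP socket with GIO and hand it to udpsrc through its 'socket' property. That also replaces the set-port-to-0/go-to-PAUSED trick for finding a free port; the eth0 address lookup is a separate question.

from gi.repository import Gio, Gst   # Gst initialised as in your script

def make_udpsrc_with_gsocket(name):
    # create and bind a UDP socket with GIO instead of letting udpsrc do it
    sock = Gio.Socket.new(Gio.SocketFamily.IPV4,
                          Gio.SocketType.DATAGRAM,
                          Gio.SocketProtocol.UDP)
    # bind to 0.0.0.0 with port 0 so the kernel picks a free port
    any_addr = Gio.InetSocketAddress.new(
        Gio.InetAddress.new_any(Gio.SocketFamily.IPV4), 0)
    sock.bind(any_addr, True)
    port = sock.get_local_address().get_port()

    src = Gst.ElementFactory.make('udpsrc', name)
    src.set_property('socket', sock)
    src.set_property('close-socket', False)
    return src, port

udpsink has a matching 'socket' property too, in case you want the RTCP reply to go out from the same socket.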
Thanks, but there's no need to create a new bug for this. Next time please just add an attachment or comment to the existing bug. *** This bug has been marked as a duplicate of bug 703370 ***
I am sorry for that....

(In reply to comment #3)
> Thanks, but there's no need to create a new bug for this. Next time please
> just add an attachment or comment to the existing bug.
>
> *** This bug has been marked as a duplicate of bug 703370 ***
(In reply to comment #2)
> Unrelated but you might want to use GIO (GSocket and friends) for the socket
> code :)
>
> What exactly is wrong with the resulting file when using 1.1.1? Are there any
> warnings printed on the terminal?

Thank you. There are no warnings on the terminal. The problem is that the bus does not receive the 'message::eos', or receives it but never calls back my function. The MP4 file is also not correct: its frame rate is 7 frames/s, while with GStreamer 1.0 it is 21 frames/s.
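To see whether the EOS really reaches the bus, I can also pop the message synchronously instead of relying on the signal watch (rough sketch; wait_for_eos and the 30-second timeout are only for this test):

def wait_for_eos(pipeline, timeout_sec=30):
    # send EOS and then wait on the bus directly, no GLib main loop needed
    pipeline.send_event(Gst.Event.new_eos())
    bus = pipeline.get_bus()
    msg = bus.timed_pop_filtered(timeout_sec * Gst.SECOND,
                                 Gst.MessageType.EOS | Gst.MessageType.ERROR)
    if msg is None:
        print "no EOS within %d seconds" % timeout_sec
    elif msg.type == Gst.MessageType.ERROR:
        err, dbg = msg.parse_error()
        print "error instead of EOS:", err, dbg
    else:
        print "EOS reached the bus"
    pipeline.set_state(Gst.State.NULL)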
Can anyone help me?