
Commit 75cb01a

tendwong authored and neilcsmith-net committed
Footage Capture, Process Video from RTSP, Process Video from MP4 (#10)
* This is an example that makes use of AppSrc and AppSink to write captured video samples on demand. The program records footage starting 5 seconds before the moment the recording is triggered, so it can also be used for surveillance etc.
* Added some helpful comments.
* Added a slight correction to the comments. Also added buffer.dispose to clearQueue.
* Corrected the JNA error shown when Gst.deinit is called by adding buffer.disown. Also added a few more comments.
* Format nicely.
* Added a simple check for RTSP streams with no audio. Also added the copyright header.
* There was always a 2 second silence at the end of each MP4 file. Solved this by adding speed-preset=ultrafast to x264enc.
* Added a new example to show how to process the video before saving it into MP4.
* Added 2 new examples to show how to process video coming from either an RTSP stream or an MP4 file. The old TestProcessVideo.java has been removed.
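The core trick the footage-capture example builds on is a GStreamer queue whose min-threshold-time holds roughly 5 seconds of buffers, so whatever comes out the far end is footage from 5 seconds ago. The following is a minimal, self-contained sketch of just that delay; the class name and the videotestsrc/autovideosink pipeline are illustrative only and not part of this commit (TestFootageCapture.java below replaces the display sink with the AppSink/AppSrc capture machinery):

import org.freedesktop.gstreamer.Gst;
import org.freedesktop.gstreamer.Pipeline;

// Illustrative sketch only -- not part of this commit.
public class DelayQueueSketch {
    public static void main(String[] args) {
        Gst.init();
        // Queue time properties are in nanoseconds: hold ~5 s of buffers before letting
        // anything through, cap at ~10 s. max-size-buffers/bytes are set to 0 (unlimited)
        // so the time threshold, not the default byte limit, governs the delay.
        Pipeline pipeline = Pipeline.launch(
                "videotestsrc is-live=true "
                + "! queue max-size-time=10000000000 min-threshold-time=5000000000 "
                + "max-size-buffers=0 max-size-bytes=0 "
                + "! videoconvert ! autovideosink sync=false");
        pipeline.play();
        Gst.main();  // runs until the process is terminated
    }
}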
1 parent e7f3445 commit 75cb01a

File tree

3 files changed: +1168 −0 lines changed

Lines changed: 308 additions & 0 deletions
@@ -0,0 +1,308 @@
/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright 2018 Tend Wong.
 *
 * Copying and distribution of this file, with or without modification,
 * are permitted in any medium without royalty provided the copyright
 * notice and this notice are preserved. This file is offered as-is,
 * without any warranty.
 *
 */
package org.freedesktop.gstreamer.examples;

import java.net.URI;
import java.util.Scanner;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import org.freedesktop.gstreamer.Bin;
import org.freedesktop.gstreamer.Buffer;
import org.freedesktop.gstreamer.Bus;
import org.freedesktop.gstreamer.Caps;
import org.freedesktop.gstreamer.FlowReturn;
import org.freedesktop.gstreamer.Gst;
import org.freedesktop.gstreamer.Pipeline;
import org.freedesktop.gstreamer.Sample;
import org.freedesktop.gstreamer.elements.AppSink;
import org.freedesktop.gstreamer.elements.AppSrc;
import org.freedesktop.gstreamer.elements.BaseSink;
import org.freedesktop.gstreamer.elements.PlayBin;
import org.freedesktop.gstreamer.event.EOSEvent;

/**
 * This example shows how to use the various gstreamer mechanisms to
 * 1) read from an RTSP stream
 * 2) put it into a time delaying queue so that the sample that arrives at
 * a sink is footage captured 5 seconds ago,
 * 3) place the sample into an AppSink which can discard or collect it into
 * a Java queue
 * 4) at the user's discretion, start reading the samples from the Java queue
 * using an AppSrc
 * 5) encode the samples into an MP4 stream
 * 6) write the stream to a file using FileSink using a filename that is based
 * on the current time.
 *
 * @author Tend Wong
 */
public class TestFootageCapture {
    // Size of Java queue
    private final static int BUFFER_SIZE = 100;

    private static boolean sendData = false;
    private static ArrayBlockingQueue<Buffer> videoQueue = new ArrayBlockingQueue<Buffer>(BUFFER_SIZE);
    private static ArrayBlockingQueue<Buffer> audioQueue = new ArrayBlockingQueue<Buffer>(BUFFER_SIZE);
    private static StringBuffer videoCaps = new StringBuffer();
    private static StringBuffer audioCaps = new StringBuffer();
    private static Semaphore gotCaps = new Semaphore(2);
    private static Semaphore canSend = new Semaphore(2);
    private static Semaphore gotEOSPlaybin = new Semaphore(1);
    private static Semaphore gotEOSPipeline = new Semaphore(1);
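    // Semaphores coordinate the capture and encode sides:
    //   gotCaps        - released by an AppSinkListener once it has captured a stream's caps
    //   canSend        - released by an AppSrcListener on its first need-data callback
    //   gotEOSPlaybin  - released when EOS is seen on the playbin bus
    //   gotEOSPipeline - released when EOS is seen on the encoding pipeline bus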

    public static void main(String args[]) throws Exception {
        Gst.init();

        System.out.println("GST finished initialization.");

        Scanner s = new Scanner(System.in);

        // The time delaying queue is specified below. Specify a different value or take out the queue
        // completely for real time capture.
        Bin videoBin = Bin.launch("queue max-size-time=10000000000 min-threshold-time=5000000000 flush-on-eos=true ! appsink name=videoAppSink",true);
        Bin audioBin = Bin.launch("queue max-size-time=10000000000 min-threshold-time=5000000000 flush-on-eos=true ! appsink name=audioAppSink",true);
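        // Queue time properties are in nanoseconds: min-threshold-time=5000000000 holds roughly
        // 5 seconds of buffers (the delay before samples reach the AppSink) and
        // max-size-time=10000000000 caps the queue at roughly 10 seconds.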

        AppSink videoAppSink = (AppSink) videoBin.getElementByName("videoAppSink");
        AppSink audioAppSink = (AppSink) audioBin.getElementByName("audioAppSink");
        videoAppSink.set("emit-signals", true);
        audioAppSink.set("emit-signals", true);

        AppSinkListener videoAppSinkListener = new AppSinkListener(videoQueue,videoCaps, gotCaps);
        videoAppSink.connect((AppSink.NEW_SAMPLE) videoAppSinkListener);
        AppSinkListener audioAppSinkListener = new AppSinkListener(audioQueue,audioCaps, gotCaps);
        audioAppSink.connect((AppSink.NEW_SAMPLE) audioAppSinkListener);

        // Specify rtsp url below
        PlayBin playbin = new PlayBin("playbin");
        playbin.setURI(URI.create("rtsp://ip:port/uri"));
        playbin.setVideoSink(videoBin);
        playbin.setAudioSink(audioBin);

        playbin.getBus().connect((Bus.EOS) (source) -> {
            System.out.println("Received the EOS on the playbin!!!");
            gotEOSPlaybin.release();
        });

        // playbin.getBus().connect((Bus.ERROR) (source, code, message) -> {
        //     System.out.println("Error Source: " + source.getName());
        //     System.out.println("Error Code: " + code);
        //     System.out.println("Error Message: " + message);
        // });
        // playbin.getBus().connect((Bus.MESSAGE) (bus, message) -> {
        //     System.out.println("Bus Message : " + message.getStructure());
        // });

        gotEOSPlaybin.drainPermits();
        gotCaps.drainPermits();
        playbin.play();

        System.out.println("Processing of RTSP feed started, please wait...");

        Pipeline pipeline = null;
        AppSrc videoAppSrc = null;
        AppSrc audioAppSrc = null;
        AppSrcListener videoAppSrcListener = null;
        AppSrcListener audioAppSrcListener = null;

        // Get caps from original video and audio stream and copy them into
        // the respective AppSrcs. If RTSP stream has no audio, the
        // gotCaps.tryAcquire will timeout and audioCaps will be empty.
        gotCaps.acquire(1);
        gotCaps.tryAcquire(5, TimeUnit.SECONDS);

        // Pipeline below encodes and writes samples to MP4 file
        // You must ensure the following plugins are available to your gstreamer
        // installation : x264enc, h264parse, mpegtsmux, faac, aacparse

        // If your RTSP feed has no audio, a different pipeline will be used that
        // encodes a video only MP4 file.
        boolean hasAudio = (audioCaps.length()>0);
        if (hasAudio) {
            pipeline = Pipeline.launch(
                "appsrc name=videoAppSrc "+
                "! rawvideoparse use-sink-caps=true "+
                "! videoconvert ! x264enc speed-preset=ultrafast ! h264parse "+
                "! mpegtsmux name=mux "+
                "! filesink sync=false name=filesink "+
                "appsrc name=audioAppSrc "+
                "! rawaudioparse use-sink-caps=true "+
                "! audioconvert ! faac ! aacparse ! mux. "
            );

            audioAppSrc = (AppSrc) pipeline.getElementByName("audioAppSrc");
            audioAppSrc.setCaps(new Caps(audioCaps.toString()));
            audioAppSrc.set("emit-signals", true);

            audioAppSrcListener = new AppSrcListener(audioQueue,canSend);
            audioAppSrc.connect((AppSrc.NEED_DATA) audioAppSrcListener);
        }
        else {
            System.out.println("RTSP stream has no audio.");

            pipeline = Pipeline.launch(
                "appsrc name=videoAppSrc "+
                "! rawvideoparse use-sink-caps=true "+
                "! videoconvert ! x264enc speed-preset=ultrafast ! h264parse "+
                "! mpegtsmux name=mux "+
                "! filesink sync=false name=filesink "
            );
        }

        videoAppSrc = (AppSrc) pipeline.getElementByName("videoAppSrc");
        videoAppSrc.setCaps(new Caps(videoCaps.toString()));
        videoAppSrc.set("emit-signals", true);

        videoAppSrcListener = new AppSrcListener(videoQueue,canSend);
        videoAppSrc.connect((AppSrc.NEED_DATA) videoAppSrcListener);

        pipeline.getBus().connect((Bus.EOS) (source) -> {
            System.out.println("Received the EOS on the pipeline!!!");
            gotEOSPipeline.release();
        });

        // pipeline.getBus().connect((Bus.ERROR) (source, code, message) -> {
        //     System.out.println("Error Source: " + source.getName());
        //     System.out.println("Error Code: " + code);
        //     System.out.println("Error Message: " + message);
        // });
        // pipeline.getBus().connect((Bus.MESSAGE) (bus, message) -> {
        //     System.out.println("Bus Message : "+message.getStructure());
        // });

        while (true) {
            System.out.println("Press ENTER to start capturing footage from the cam 5 seconds ago, or type 'QUIT' and press ENTER to exit...");
            if (!s.nextLine().isEmpty())
                break;

            // Specify filename of MP4 file based on current time
            BaseSink filesink = (BaseSink) pipeline.getElementByName("filesink");
            filesink.set("location", "capture" + System.currentTimeMillis() + ".mp4");

            // Clear any unread buffers from previous capture in the Java queues
            if (hasAudio) {
                clearQueue(audioQueue);
                audioAppSrcListener.resetSendFlagged();
            }
            clearQueue(videoQueue);
            videoAppSrcListener.resetSendFlagged();

            gotEOSPipeline.drainPermits();
            canSend.drainPermits();
            pipeline.play();

            // Make sure that both video and audio buffers are streamed out at
            // the same time otherwise you get video or sound first.
            canSend.acquire(hasAudio ? 2 : 1);
            sendData = true;

            System.out.println("Press ENTER to stop the capture...");
            s.nextLine();

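            // Sending EOS drains the encoder and muxer so the file is finalized; wait for the
            // EOS message on the bus before stopping the pipeline.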
            pipeline.sendEvent(new EOSEvent());
            gotEOSPipeline.acquire(1);
            System.out.println("Capture stopped.");

            pipeline.stop();
            sendData = false;
        }

        playbin.sendEvent(new EOSEvent());
        gotEOSPlaybin.acquire(1);
        System.out.println("Stopped processing of RTSP feed.");

        playbin.stop();

        System.out.println("Exiting program.");

        Gst.deinit();
    }

    private static void clearQueue(ArrayBlockingQueue<Buffer> queue) {
        queue.clear();
    }

    private static class AppSinkListener implements AppSink.NEW_SAMPLE {
        private ArrayBlockingQueue<Buffer> queue;
        private StringBuffer caps;
        private Semaphore gotCaps;

        private boolean capsSet;

        public AppSinkListener(ArrayBlockingQueue<Buffer> queue, StringBuffer caps, Semaphore gotCaps) {
            this.queue = queue;
            this.caps = caps;
            this.gotCaps = gotCaps;
            capsSet = false;
        }

        @Override
        public FlowReturn newSample(AppSink elem) {
            Sample sample = elem.pullSample();

            if (!capsSet) {
                caps.append(sample.getCaps().toString());
                capsSet = true;
                gotCaps.release();
            }

            // This section will be executed only when the sample needs to be
            // passed to the src.
            // When sendData is true, the sample's buffer will be duplicated
            // using buffer.copy and offered to the respective queue
            // (videoQueue or audioQueue).
            // Buffer's copy must disown the native object held by original
            // buffer otherwise a jna error will be issued.

            if (sendData) {
                Buffer buffer = sample.getBuffer().copy();
                buffer.disown();
                queue.offer(buffer);
            }

            sample.dispose();

            return FlowReturn.OK;
        }
    }

    private static class AppSrcListener implements AppSrc.NEED_DATA {
        private ArrayBlockingQueue<Buffer> queue;
        private Semaphore canSend;

        private boolean sendFlagged;

        public AppSrcListener(ArrayBlockingQueue<Buffer> queue, Semaphore canSend) {
            this.queue = queue;
            this.canSend = canSend;
            sendFlagged = false;
        }

        public void resetSendFlagged() {
            sendFlagged = false;
        }

        @Override
        public void needData(AppSrc elem, int size) {
            if (!sendFlagged) {
                sendFlagged = true;
                canSend.release();
            }

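            // queue.take() blocks until the AppSinkListener has queued another buffer, so the
            // AppSrc is fed at the pace the delayed footage arrives.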
            try {
                elem.pushBuffer(queue.take());
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
