Convert RTSP stream to MP4

I have an IP camera that supports RTSP, and I need to display its stream to multiple clients using HTML5.
Since the HTML5 video tag doesn't support RTSP, I'm calling ffmpeg to encode it to a WebM stream, but the result is very glitchy and distorts the original stream.
The command I'm using is the following:
ffmpeg -i my_RTSP_URL -vcodec libvpx -f webm -
To distribute the stream I'm using a Node.js instance that invokes ffmpeg on the RTSP stream when needed.
The solution looks like this:
Camera --via RTSP--> ffmpeg --encodes to WebM--> Node.js --via HTML5 video--> Client
Node.js code:
var http = require('http');
var child_process = require("child_process");

var stdouts = {};

http.createServer(function (req, resp) {
  // Parse the request path, e.g. /LIVE/cam1 -> ["LIVE", "cam1"]
  var params = req.url.substring(1).split("/");
  switch (params[0])
  {
    case "LIVE":
      // The stream is WebM, so advertise it as such
      resp.writeHead(200, {'Content-Type': 'video/webm', 'Connection': 'keep-alive'});

      // Start ffmpeg
      var ffmpeg = child_process.spawn("ffmpeg",[
        "-i","my_RTSP_URL", // Input RTSP URL
        "-vcodec","libvpx", // VP8 encoding
        "-f","webm",        // Container format
        "-"                 // Output to STDOUT
      ]);

      ffmpeg.on('exit', function()
      {
        console.log('ffmpeg finished');
      });
      ffmpeg.on('error',function(e)
      {
        console.log(e);
      });
      ffmpeg.stderr.on('data', function(data) {
        console.log('stderr: ' + data);
      });

      // Keep a handle to the stream, keyed by camera id
      stdouts[params[1]] = ffmpeg.stdout;

      // Pipe the video output to the client response
      ffmpeg.stdout.pipe(resp);
      console.log("Initializing camera");
      break;
  }
}).listen(8088);
console.log('Server running at port 8088');
Am I using the wrong codec library? Or why am I getting such a weird result?

It seems to me this can help: https://github.com/kyriesent/node-rtsp-stream
I have also worked with this technology; you can visit the repository on Bitbucket: https://bitbucket.org/kaleniuk_ihor/neuro_vision/src/db_watch/
On the other hand, your code may not work because you did not install ffmpeg at the root of drive C.
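For reference, a minimal sketch of node-rtsp-stream usage, adapted from its README (the stream name, port, and RTSP URL below are placeholders, and the option names follow that README); it transcodes the RTSP feed to MPEG-1 over a WebSocket for playback with jsmpeg, rather than through the video tag:
var Stream = require('node-rtsp-stream');

var stream = new Stream({
  name: 'camera1',            // placeholder stream name
  streamUrl: 'my_RTSP_URL',   // your camera's RTSP URL
  wsPort: 9999,               // WebSocket port the browser player connects to
  ffmpegOptions: {            // extra ffmpeg flags (per the library's README)
    '-stats': '',             // flags without a value take an empty string
    '-r': 30                  // frame rate
  }
});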

Related

Play stream from gstreamer in browser

I want to play a stream from gstreamer in a web browser.
I played around with RTP, WebRTC and SDP files, but while VLC was able to connect to the stream via a simple SDP file, browsers were not. I later understood that WebRTC requires a secure connection, which only complicates things and is not needed for my purposes. I stumbled upon the Media Source Extensions (MSE) of HTML5, which seem like they could help, but I'm not able to find a comprehensive tutorial or appropriate specs on how to get gstreamer to stream the correct data and later how to play it using MSE. I'm also not sure about the latency of using MSE.
So, is there a way to play a stream from gstreamer in a browser?
Thanks.
Using the node-webrtc project, I was able to combine output from gstreamer with a WebRTC call. For gstreamer, there is a project which enables its use with Node: gstreamer-superficial. So basically, you need to run the gstreamer pipeline from the Node process, which can then control gstreamer's output. For every gstreamer frame, a callback is invoked which takes the frame and can send it to WebRTC calls.
Then a WebRTC call needs to be implemented; this requires some signaling protocol. One side of the call will be the server and the other will be the client's browser, instead of two browsers. Then a video track is created, and frames from gstreamer-superficial are pushed into it.
const { RTCVideoSource } = require("wrtc").nonstandard;
const gstreamer = require("gstreamer-superficial");

const source = new RTCVideoSource();
// This is the WebRTC video track which should be used with addTransceiver, see below
const track = source.createTrack();

const frame = {
  width: 1920,
  height: 1080,
  data: null
};

const pipeline = new gstreamer.Pipeline("v4l2src ! videorate ! video/x-raw,format=YUY2,width=1920,height=1080,framerate=25/1 ! videoconvert ! video/x-raw,format=I420 ! appsink name=sink");
const appsink = pipeline.findChild("sink");

const pull = function() {
  appsink.pull(function(buf, caps) {
    if (buf) {
      frame.data = new Uint8Array(buf);
      try {
        source.onFrame(frame);
      } catch (e) {}
      pull();
    } else if (!caps) {
      console.log("PULL DROPPED");
      setTimeout(pull, 500);
    }
  });
};

pipeline.play();
pull();

// Example:
const useTrack = SomeRTCPeerConnection => SomeRTCPeerConnection.addTransceiver(track, { direction: "sendonly" });
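To illustrate the server side of the call, here is a minimal sketch using the standard wrtc RTCPeerConnection API; the signaling transport (sendToBrowser below) is an assumption and left up to you:
const { RTCPeerConnection } = require("wrtc");

async function startCall(sendToBrowser) { // sendToBrowser: your signaling channel (assumption)
  const pc = new RTCPeerConnection();
  useTrack(pc); // attach the gstreamer-fed video track from above
  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);
  sendToBrowser(pc.localDescription); // deliver the offer; the browser answers via the same channel
  return pc;
}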

Audio recorded with MediaRecorder on Chrome missing duration

I am recording audio (oga/vorbis) files with MediaRecorder. When I record these files with Chrome I get problems: I cannot edit the files with ffmpeg, and when I try to play them in Firefox it says they are corrupt (they do play fine in Chrome though).
Looking at their metadata with ffmpeg I get this:
Input #0, matroska,webm, from '91.oga':
  Metadata:
    encoder         : Chrome
  Duration: N/A, start: 0.000000, bitrate: N/A
    Stream #0:0(eng): Audio: opus, 48000 Hz, mono, fltp (default)
[STREAM]
index=0
codec_name=opus
codec_long_name=Opus (Opus Interactive Audio Codec)
profile=unknown
codec_type=audio
codec_time_base=1/48000
codec_tag_string=[0][0][0][0]
codec_tag=0x0000
sample_fmt=fltp
sample_rate=48000
channels=1
channel_layout=mono
bits_per_sample=0
id=N/A
r_frame_rate=0/0
avg_frame_rate=0/0
time_base=1/1000
start_pts=0
start_time=0.000000
duration_ts=N/A
duration=N/A
bit_rate=N/A
max_bit_rate=N/A
bits_per_raw_sample=N/A
nb_frames=N/A
nb_read_frames=N/A
nb_read_packets=N/A
DISPOSITION:default=1
DISPOSITION:dub=0
DISPOSITION:original=0
DISPOSITION:comment=0
DISPOSITION:lyrics=0
DISPOSITION:karaoke=0
DISPOSITION:forced=0
DISPOSITION:hearing_impaired=0
DISPOSITION:visual_impaired=0
DISPOSITION:clean_effects=0
DISPOSITION:attached_pic=0
TAG:language=eng
[/STREAM]
[FORMAT]
filename=91.oga
nb_streams=1
nb_programs=0
format_name=matroska,webm
format_long_name=Matroska / WebM
start_time=0.000000
duration=N/A
size=7195
bit_rate=N/A
probe_score=100
TAG:encoder=Chrome
As you can see, there are problems with the duration. I have looked at posts like this one:
How can I add predefined length to audio recorded from MediaRecorder in Chrome?
But even trying that, I got errors when trying to chop and merge files. For example, when running:
ffmpeg -f concat -i 89_inputs.txt -c copy final.oga
I get a lot of this:
[oga @ 00000000006789c0] Non-monotonous DTS in output stream 0:0; previous: 57612, current: 1980; changing to 57613. This may result in incorrect timestamps in the output file.
[oga @ 00000000006789c0] Non-monotonous DTS in output stream 0:0; previous: 57613, current: 2041; changing to 57614. This may result in incorrect timestamps in the output file.
DTS -442721849179034176, next:42521 st:0 invalid dropping
PTS -442721849179034176, next:42521 invalid dropping st:0
[oga @ 00000000006789c0] Non-monotonous DTS in output stream 0:0; previous: 57614, current: 2041; changing to 57615. This may result in incorrect timestamps in the output file.
[oga @ 00000000006789c0] Timestamps are unset in a packet for stream 0. This is deprecated and will stop working in the future. Fix your code to set the timestamps properly
DTS -442721849179031296, next:42521 st:0 invalid dropping
PTS -442721849179031296, next:42521 invalid dropping st:0
Does anyone know what we need to do to audio files recorded from Chrome for them to be usable? Or is there a problem with my setup?
Recorder JS:
if (navigator.getUserMedia) {
  console.log('getUserMedia supported.');
  var constraints = { audio: true };
  var chunks = [];

  var onSuccess = function(stream) {
    var mediaRecorder = new MediaRecorder(stream);

    record.onclick = function() {
      mediaRecorder.start();
      console.log(mediaRecorder.state);
      console.log("recorder started");
      record.style.background = "red";
      stop.disabled = false;
      record.disabled = true;
      var aud = document.getElementById("audioClip");
      start = aud.currentTime;
    }

    stop.onclick = function() {
      console.log(mediaRecorder.state);
      console.log("Recording request sent.");
      mediaRecorder.stop();
    }

    // Collect the recorded data as it becomes available
    mediaRecorder.ondataavailable = function(e) {
      chunks.push(e.data);
    }

    mediaRecorder.onstop = function(e) {
      console.log("data available after MediaRecorder.stop() called.");
      var audio = document.createElement('audio');
      audio.setAttribute('controls', '');
      audio.setAttribute('id', 'audioClip');
      audio.controls = true;
      var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs="vorbis"' });
      chunks = [];
      var audioURL = window.URL.createObjectURL(blob);
      audio.src = audioURL;
      sendRecToPost(blob); // this just sends the audio blob to the server by POST
      console.log("recorder stopped");
    }
  };

  var onError = function(err) {
    console.log('The following error occurred: ' + err);
  };

  navigator.getUserMedia(constraints, onSuccess, onError);
}
I found in the ffmpeg documentation that we can set metadata during conversion using this option:
//-metadata[:metadata_specifier] key=value (output,per-metadata)
//Set a metadata key/value pair.
ffmpeg -i in.avi -metadata title="my title" out.flv
You can also test whether limiting the duration at conversion works in your case:
//-t duration (input/output)
//When used as an input option (before -i), limit the duration of data read from the input file.
//When used as an output option (before an output url), stop writing the output after its duration reaches duration.
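As a sketch of both ideas combined (the output file name and the 30-second cap are assumptions; remuxing with -c copy rewrites the container, which often fills in the missing duration):
ffmpeg -i 91.oga -t 30 -c copy fixed.oga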

HTTP Header for Duration of an MP4 for HTML5 Video

I am trying to stream MP4 video as it is encoded from a web server. I believe I used the appropriate flags, but it is not working correctly. When I download the video from my stream and open it with VLC, it properly shows the duration. Since a socket is not seekable, I assume ffmpeg writes the metadata at the end? My Chrome browser always shows a duration of 8 seconds. The first 8 seconds play at normal speed, but afterwards the pause button turns into a play button and the video plays very fast, probably as fast as it is received. The audio, however, plays at normal speed. I tried document.getElementById('myVid').duration = 20000 but it is a read-only field.
I wonder, is there any way to explicitly state the duration in HTTP headers or in any other way? I cannot find any documentation about it.
ffmpeg -fflags +genpts -re -i - -vcodec libx264 -acodec libvo_aacenc -ar 44100 -ac 2 -ab 128000 -profile baseline -level 30 -preset fast -f mp4 -movflags frag_keyframe+faststart pipe:1
To the close-voters who think this is not programming-related: I use it in my own server that I coded, and I need to set the duration programmatically, via JavaScript or by setting the HTTP header. I believe it may be related to either ffmpeg or HTTP headers, which is why I posted it here.
var express = require('express');
var fs = require('fs');
var app = express();

app.get("/video/*", function(req, res) {
  res.writeHead(200, {
    'Content-Type': 'video/mp4',
  });

  // The source path is base64-encoded in the URL
  var dir = req.url.split("/").splice(2).join("/");
  var src = Buffer.from(dir, 'base64').toString();

  var Transcoder = require('stream-transcoder');
  var stream = fs.createReadStream(src);

  // I added my own flags to this module; they are below:
  new Transcoder(stream)
    .videoCodec('libx264')
    .audioCodec("libvo_aacenc")
    .sampleRate(44100)
    .channels(2)
    .audioBitrate(128 * 1000)
    .format('mp4')
    .on('finish', function() {
      console.log("finished");
    })
    .stream().pipe(res);
});
The exec function in that stream-transcoder module pushes these flags:
a.push("-fflags");
a.push("+genpts");
a.push("-re");
a.push("-profile");
a.push("baseline");
a.push("-level");
a.push("30");
a.push("-preset");
a.push("fast");
a.push("-strict");
a.push("experimental");
a.push("-frag_duration");
a.push("" + 2 * (1000 * 1000));
var child = spawn('ffmpeg', a, {
  cwd: os.tmpdir()
});
I believe the X-Content-Duration header is what you need.
Mozilla documentation on X-Content-Duration*
* The documentation discusses the Ogg format, but the principle applies to other video formats
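If that is the fix for your case, the writeHead in the route above would become something like this (a sketch; the 20.5-second value is a hypothetical placeholder, and in practice you would determine the real duration beforehand, e.g. by probing the source with ffprobe):
res.writeHead(200, {
  'Content-Type': 'video/mp4',
  'X-Content-Duration': '20.5' // duration in seconds; hypothetical value
});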

How to get MP4 using JavaCV for live video streaming

I am trying to make an application that gets streaming video data using JavaCV and sends it to a WebSocket server. The WebSocket server then distributes the video data to the connected client(s).
1. The application gets live streaming data (MP4) from my PC's webcam using JavaCV.
2. The application keeps sending it to the WebSocket server.
3. The server receives it as binary and sends it to the connected client(s).
4. A web browser connects to the server, and JavaScript running in the browser shows the live video after receiving it from the server.
I am new to JavaCV and OpenCV. The snippet below works for drawing video using com.googlecode.javacv.CanvasFrame with no problem. However, I am not sure how to grab MP4 data as live streaming data.
try {
    FrameGrabber grabber = FrameGrabber.createDefault(0);
    grabber.setFormat("mp4");
    grabber.setFrameRate(30);
    grabber.setImageWidth(640);
    grabber.setImageHeight(480);
    grabber.start();
    double frameRate = grabber.getFrameRate();
    long wait = (long) (1000 / (frameRate == 0 ? 10 : frameRate));
    ByteBuffer buf = null;
    while (true) {
        Thread.sleep(wait);
        IplImage image = grabber.grab();
        if (image != null) {
            buf = image.getByteBuffer();
            send(buf); // Send video data using the WebSocket.
        }
    }
} catch (FrameGrabber.Exception | InterruptedException ex) {
    Logger.getLogger(WSockClient.class.getName()).log(Level.SEVERE, null, ex);
}
Below is the code for the WebSocket server and the JavaScript/HTML5 client.
Both work fine if the application reads an MP4 file from local disk and sends it to the server. But the data produced by JavaCV as above seems to be invalid for showing in the browser.
The server just receives the data and distributes it to the clients; I believe there is no problem there. Here is the code.
@OnMessage
public void binaryMessage(ByteBuffer buf, Session client) throws IOException, EncodeException {
    for (Session otherSession : peers) {
        if (!otherSession.equals(client)) {
            otherSession.getAsyncRemote().sendBinary(buf, new StreamHandler());
        }
    }
}
Here is the JavaScript/HTML5:
<body>
  <video controls width="640" height="480" autoplay></video><br>
  <canvas id="canvas1" width="640" height="480"></canvas><br>
</body>
<script>
var ws;
var protocol = 'ws';
var host = "localhost:8080";
var url = protocol + "://" + host + "/live/stream";
ws = new WebSocket(url);
ws.binaryType = 'arraybuffer';
ws.addEventListener("open", onOpenWebSocket, false);
ws.addEventListener("close", onCloseWebSocket, false);
ws.addEventListener("message", onMessageWebSocket, false);
window.addEventListener("unload", onUnload, false);
...
function onMessageWebSocket(event) {
  var blob = new Blob([event.data], {type: 'video/mp4'});
  var rb = blob.slice(0, blob.size, 'video/mp4');
  video.src = window.URL.createObjectURL(rb);
}
...
</script>
I think I need to obtain the MP4 data as a stream using JavaCV, but I don't know how to do that. Please help.
Any comments or suggestions would be appreciated. Thank you.

Can't seek HTML5 video or audio in Chrome

I've been fiddling with the hell that is HTML5 video/audio for a couple of weeks now. Usually the reason why something failed popped up after a while, but I've been unable to find out why I get forwarding and rewinding problems in Chrome.
Anyhow...
The video or audio tag is loaded in an ExtJS panel when a video or audio file is requested. The files are sent as streams and they work fine in IE and Firefox (after adding the duration to the response header).
There's an issue with Safari, but it's apparently due to the fact that the whole site runs over HTTPS (that's being worked on right now).
In Chrome (which is where my issue lies, and which is at the latest version) the video and audio load just fine, but I'm unable to rewind or fast-forward. When trying to seek, videos just jump ahead a few seconds until they reach the end of the stream. The audio also plays just fine, but trying to rewind (or forward) multiple times simply breaks the progress bar and stops the audio from playing on.
I'm not entirely sure what's being sent from the server, but I'm wondering if this might be caused by missing data in the response. If it's not that, anything else to point me towards a fix is just as welcome. I think I've covered pretty much the whole setup, and I've made sure that there's a source tag for each browser.
Edit: this is the code generated by the JavaScript for one of the files:
<video width="1889" height="2" preload="auto" autoplay="1" controls="1" id="videoPlayer" style="width: 1889px; height: 233px; ">
  <source src="http://localhost:8080/epaServer/epa/documents/496.ds_webm?sessionId=5616fde4-50af-43d6-a57c-f06540b64fcb" type="video/webm">
  <source src="http://localhost:8080/epaServer/epa/documents/496.ds_mp4?sessionId=5616fde4-50af-43d6-a57c-f06540b64fcb" type="video/mp4">
  <div>Your browser doesn't support html5 video. <a>Upgrade Chrome</a></div>
</video>
I've also found that I can't seek any of the files even if I open them separately from the application.
I've tried to find more info on my own; these are the headers Chrome shows in the Network tab:
Request URL:https://localhost:8443/epaServer/epa/documents/496.ds_webm?sessionId=5616fde4-50af-43d6-a57c-f06540b64fcb
Request Method:GET
Status Code:200 OK
Request Headers
Accept:*/*
Accept-Charset:ISO-8859-1,utf-8;q=0.7,*;q=0.3
Accept-Encoding:identity;q=1, *;q=0
Accept-Language:en-US,en;q=0.8
Connection:keep-alive
Cookie:sessionId=5616fde4-50af-43d6-a57c-f06540b64fcb
Host:localhost:8443
User-Agent:Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19
Query String Parameters
sessionId:5616fde4-50af-43d6-a57c-f06540b64fcb
Response Headers
Cache-Control:private
Content-Length:1588816
Content-Type:video/webm
Date:Mon, 14 May 2012 14:23:02 GMT
Expires:Thu, 01 Jan 1970 01:00:00 CET
Server:Apache-Coyote/1.1
X-Content-Duration:17.31
I found the reason why it's not working in this question:
HTML5 video will not loop
Our server doesn't understand partial content right now.
As a result, Chrome sends Range requests that never get answered, which in turn makes our videos and audio unseekable (and unloopable).
You must handle req.headers['range'], which Chrome will send to your streaming server.
Please refer to my code below. It works well in Chrome, Firefox, Edge and IE. I haven't tested it on Safari, but hopefully it also works.
I used a Sails/Node.js backend and a GridFS/MongoDB database for storing video files as chunks.
try {
  let foundMetaFile = await GridFS.findOne({ id: fileId });
  if (!foundMetaFile) return res.status(400).json(Res.error(undefined, { message: `invalid ${fileId} file` }));

  let fileLength = foundMetaFile['length'];
  let contentType = foundMetaFile['contentType'];
  // let chunkSize = foundMetaFile['chunkSize'];

  if (req.headers['range']) {
    // Range request, partially stream the file
    console.log('Range Request');
    var parts = req.headers['range'].replace(/bytes=/, "").split("-");
    var partialStart = parts[0];
    var partialEnd = parts[1];
    var start = parseInt(partialStart, 10);
    var end = partialEnd ? parseInt(partialEnd, 10) : fileLength - 1;
    var chunkSize = (end - start) + 1;
    console.log('Range ', start, '-', end);
    res.writeHead(206, {
      'Content-Range': 'bytes ' + start + '-' + end + '/' + fileLength,
      'Accept-Ranges': 'bytes',
      'Content-Length': chunkSize,
      'Content-Type': contentType
    });
  }

  let { mongodbConnection } = global;
  let bucket = new GridFSBucket(mongodbConnection, { bucketName: 'fs' });
  return new Promise((resolve, reject) => {
    let downloadStream = bucket.openDownloadStream(fileId);
    downloadStream.on('error', (err) => {
      console.log("Received Error stream");
      res.end();
      reject(err);
    });
    downloadStream.on('end', () => {
      console.log("Received End stream");
      res.end();
      resolve(true);
    });
    console.log("start streaming");
    downloadStream.pipe(res);
  });
} catch (error) {
  switch (error.name) {
    case 'UsageError':
      return res.status(400).json(Res.error(undefined, { message: 'invalid Input' }));
    case 'AdapterError':
      return res.status(400).json(Res.error(undefined, { message: 'adapter Error' }));
    default:
      return res.serverError(error);
  }
}