My requirement is to send audio chunks captured from the microphone to the server over an HTML5 WebSocket connection.
The flow should be: establish a WebSocket connection with the server; while the user is speaking, capture small chunks of audio and send them to the server; keep doing this until the user stops speaking for 10 seconds, then close the WebSocket connection; reopen the connection when the user starts speaking again.
I know how to open a connection and send the audio for the first time.
I have two buttons. On click of the first button (startRecording) I open the WebSocket connection and start recording the audio.
On click of the second button (stopRecording) I send the audio to the server.
My problem is how to do this without buttons: recording has to continue until the user pauses, and then the audio should be sent to the server.
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var recorder;
var audio = document.querySelector('audio');
var websocket;

var onSuccess = function(s) {
    var context = new AudioContext();
    var mediaStreamSource = context.createMediaStreamSource(s);
    recorder = new Recorder(mediaStreamSource);
    recorder.record();
    // audio loopback
    // mediaStreamSource.connect(context.destination);
};

var onFail = function(e) {
    console.log('getUserMedia failed:', e);
};

var onError = function(evt) {
    console.log('WebSocket error:', evt);
};

function startRecording() {
    var wsURI = "url";
    websocket = new WebSocket(wsURI);
    websocket.onopen = function(evt) { onOpen(evt); };
    websocket.onclose = function(evt) { onClose(); };
    websocket.onmessage = function(evt) { onMessage(evt); };
    websocket.onerror = function(evt) { onError(evt); };

    function onOpen(evt) {
        var message = {
            'action': 'start',
            //'content-type': 'audio/l16;rate=22050'
            'content-type': 'audio/wav;rate=22050'
        };
        websocket.send(JSON.stringify(message));
    }

    function onMessage(evt) {
        console.log(evt.data);
        //console.log(JSON.parse(evt.data))
    }

    if (navigator.getUserMedia) {
        navigator.getUserMedia({audio: true}, onSuccess, onFail);
    } else {
        console.log('navigator.getUserMedia not present');
    }
}

function onClose() {
    websocket.close();
}

function stopRecording() {
    recorder.stop();
    recorder.exportWAV(function(s) {
        //audio.src = window.URL.createObjectURL(s);
        websocket.send(s);
        //websocket.onclose();
    });
}
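One way to drop the buttons is to monitor the input level and treat sustained silence as the stop signal. Below is a minimal sketch of that idea using a Web Audio ScriptProcessorNode; the threshold value, the 10-second window, and the calls into startRecording/stopRecording are assumptions for illustration, not part of the code above.

// Hypothetical silence detector: computes the RMS level of each audio
// buffer and triggers stopRecording() after 10 s below the threshold.
var SILENCE_THRESHOLD = 0.01;   // assumed level; tune for your microphone
var SILENCE_MS = 10000;
var silenceStart = null;
var speaking = false;

function monitorLevel(context, mediaStreamSource) {
    var processor = context.createScriptProcessor(4096, 1, 1);
    mediaStreamSource.connect(processor);
    processor.connect(context.destination);

    processor.onaudioprocess = function(e) {
        var samples = e.inputBuffer.getChannelData(0);
        var sum = 0;
        for (var i = 0; i < samples.length; i++) {
            sum += samples[i] * samples[i];
        }
        var rms = Math.sqrt(sum / samples.length);

        if (rms > SILENCE_THRESHOLD) {
            silenceStart = null;
            if (!speaking) {
                speaking = true;
                startRecording();   // reopen the socket when speech resumes
            }
        } else if (speaking) {
            if (silenceStart === null) {
                silenceStart = Date.now();
            } else if (Date.now() - silenceStart > SILENCE_MS) {
                speaking = false;
                stopRecording();    // send the audio and close the socket
            }
        }
    };
}

You would call monitorLevel(context, mediaStreamSource) from inside onSuccess, so the detector runs for the lifetime of the microphone stream.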
Related
I am interested in building a web application, served by nginx, that will ask the user for access to their web camera, record for a given time period, maybe replay the recording to the user, and store it on the server.
I am also thinking of a basic user interface. Could that be pure HTML+PHP? Could that be Python?
Similar questions here do not seem very relevant/helpful.
What would you suggest?
You can use MediaRecorder to record video from the webcam.
Record the video and collect its data into recordedBlobs:
let mediaRecorder;
let recordedBlobs = [];

function handleDataAvailable(event) {
    if (event.data && event.data.size > 0) {
        recordedBlobs.push(event.data);
    }
}

function startRecording() {
    recordedBlobs = [];
    let options = { mimeType: 'video/webm;codecs=vp8' };
    let types = ['video/webm;codecs=vp9', 'video/webm;codecs=h264', 'video/webm', 'video/mpeg', ''];
    for (let type of types) {
        try {
            if (MediaRecorder.isTypeSupported(type)) {
                options = { mimeType: type };
                break;
            }
        } catch (e) {
            console.log(`Exception while checking MIME type support: ${JSON.stringify(e)}`);
        }
    }
    try {
        mediaRecorder = new MediaRecorder(window.stream, options);
    } catch (e) {
        console.error(`Exception while creating MediaRecorder: ${JSON.stringify(e)}`);
        return;
    }
    mediaRecorder.onstop = (event) => {
        console.log('Recorder stopped: ', event);
    };
    mediaRecorder.ondataavailable = handleDataAvailable;
    mediaRecorder.start(10); // emit a dataavailable event every 10 ms
}

function stopRecording() {
    mediaRecorder.stop();
}
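The snippet above reads from window.stream, which it assumes has already been set. A minimal sketch of that setup using the promise-based getUserMedia (my addition; the element selector is an assumption):

// Acquire the webcam and expose the stream as window.stream,
// which startRecording() above expects to find.
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
    .then((stream) => {
        window.stream = stream;
        // Optional live preview in a <video> element.
        document.querySelector('video').srcObject = stream;
    })
    .catch((err) => console.error('getUserMedia failed:', err));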
Upload the video data to your action page:
function upload() {
    const blob = new Blob(recordedBlobs, { type: 'video/webm' });
    var formData = new FormData();
    // fileName must be defined elsewhere; it names the uploaded file.
    formData.append("video", blob, fileName + ".webm");
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "upload.aspx");
    xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                // Upload succeeded.
            }
        }
    };
    xhr.onerror = function () {
        alert('Network Error');
    };
    xhr.send(formData);
}
You can implement the action page in any server-side language: PHP, Java, Python, or C#.
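For example, here is a minimal sketch of such an action page in Node with Express and multer (my choice of stack; the route name and upload directory are assumptions):

// Minimal upload endpoint: accepts the "video" form field posted by
// upload() above and stores the file on disk.
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' }); // assumed storage directory

app.post('/upload', upload.single('video'), (req, res) => {
    console.log('Saved', req.file.originalname, 'as', req.file.path);
    res.sendStatus(200);
});

app.listen(8080);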
No. Access to the user's webcam through a browser requires JavaScript and the APIs the browser provides to it.
Server-side programs do not run on the user's computer and thus cannot access its hardware (and Python and PHP cannot run client-side from a webpage).
I'm trying to stream an RTSP live stream through socket.io using ffmpeg (this part works fine), but now I need to get that video from the socket and play it in an HTML5 video tag.
To do this I'm using MediaSource, getting small pieces of video through the socket and then appending them to the MediaSource.
This solution plays the video for a few seconds or minutes and then suddenly stops, without throwing any error in the Chrome console.
var socket = io();
var ms = new MediaSource();
var sourceBuffer;
var queue = [];

var video = document.getElementById("video");
video.src = window.URL.createObjectURL(ms);

socket.on('start', function (response) {
    console.log(response);
    socket.emit('streaming', $stateParams.id);
    ms.addEventListener('sourceopen', videoLoad, false);
    ms.addEventListener('sourceclose', videoClosed, false);
});

function videoLoad() {
    sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    // Drain the queue once the previous append has finished
    // ('updateend' fires when the SourceBuffer stops updating).
    sourceBuffer.addEventListener('updateend', function () {
        if (queue.length > 0 && !sourceBuffer.updating) {
            console.log(queue.length);
            sourceBuffer.appendBuffer(queue.shift());
        }
    });
    socket.on('data', function (response) {
        var bytes = new Uint8Array(response);
        console.log(bytes.byteLength);
        if (sourceBuffer.updating || queue.length > 0) {
            queue.push(bytes);
        } else {
            sourceBuffer.appendBuffer(bytes);
        }
    });
}

function videoClosed(e) {
    console.log('mediaSource readyState: ' + this.readyState);
}
In chrome://media-internals/ the video player log shows me the following a couple of times, and then the video stops:
video_buffering_state BUFFERING_HAVE_ENOUGH
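When the pipeline stalls silently like this, it can help to inspect the element and SourceBuffer state directly. A small diagnostic sketch (my addition, reusing the video variable from above) that logs the buffered ranges and any MediaError:

// Periodically log playback position, buffered ranges, and any decode
// error the <video> element has recorded without surfacing an event.
setInterval(function () {
    if (video.error) {
        console.log('MediaError code:', video.error.code);
    }
    for (var i = 0; i < video.buffered.length; i++) {
        console.log('buffered', video.buffered.start(i), '-', video.buffered.end(i),
                    'currentTime', video.currentTime);
    }
}, 2000);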
I am trying to create a Web Audio API based application. So far I have multiple buffer nodes connected to the destination.
What I am trying to achieve is to record the resulting output when the user presses a button, for example. I tried using recorder.js, but as far as I understand it, you need to play the graph back while recording.
The following code illustrates the issue:
<html>
<body>
<audio controls autoplay></audio>
<script type="text/javascript" src="recorder.js"></script>
<input onclick="startRecording()" type="button" value="start recording" />
<input onclick="stopRecording()" type="button" value="stop recording and play" />
<script>
    var context = new webkitAudioContext();
    var request = new XMLHttpRequest();

    var onFail = function(e) {
        console.log('Rejected!', e);
    };

    var onSuccess = function(s) {
        request.open('GET', 'voice.wav', true);
        request.responseType = 'arraybuffer';
        request.onload = function () {
            var undecodedAudio = request.response;
            context.decodeAudioData(undecodedAudio, function (buffer) {
                // The contents of our wav file is now an AudioBuffer
                var sourceBuffer = context.createBufferSource();
                sourceBuffer.buffer = buffer;
                sourceBuffer.connect(context.destination);
                recorder = new Recorder(sourceBuffer);
                recorder.record();
                sourceBuffer.start(context.currentTime);
            });
        };
        request.send();
    };

    window.URL = window.URL || window.webkitURL;
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia || navigator.msGetUserMedia;
    var recorder;
    var audio = document.querySelector('audio');

    function startRecording() {
        if (navigator.getUserMedia) {
            navigator.getUserMedia({audio: true}, onSuccess, onFail);
        } else {
            console.log('navigator.getUserMedia not present');
        }
    }

    function stopRecording() {
        recorder.stop();
        recorder.exportWAV(function(s) {
            audio.src = window.URL.createObjectURL(s);
        });
    }
</script>
</body>
</html>
What I want is that when the user presses record, the file is converted to the resulting audio without actual playback.
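One route worth noting (my addition, not part of the original post) is OfflineAudioContext, which renders an audio graph faster than real time and without audible playback. A minimal sketch, assuming buffer is the decoded AudioBuffer from the snippet above:

// Render the decoded AudioBuffer through an offline graph: nothing is
// played aloud, and rendering finishes as fast as the machine allows.
var offline = new OfflineAudioContext(buffer.numberOfChannels,
                                      buffer.length,
                                      buffer.sampleRate);
var source = offline.createBufferSource();
source.buffer = buffer;
source.connect(offline.destination);
source.start(0);

offline.startRendering().then(function (renderedBuffer) {
    // renderedBuffer holds the full mix; encode it to WAV here
    // (e.g. with recorder.js's WAV encoder) and hand it to the user.
    console.log('Rendered', renderedBuffer.duration, 'seconds offline');
});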
I'm trying to implement this video conferencing HTML5 application. I'm not sure exactly what is going on, but I followed the instructions; maybe I missed something...
I copied the HTML file (index.html), with the socket IP changed to the correct one for my server:
<!DOCTYPE html>
<html>
<head>
<title>WebRTC Demo</title>
</head>
<body>
<h1>WebRTC Demo using Socket.IO</h1>
<video id="webrtc-sourcevid" autoplay style="width: 320px; height: 240px; border: 1px solid black;"></video>
<button type="button" onclick="startVideo();">Start video</button>
<button type="button" onclick="stopVideo();">Stop video</button>
<video id="webrtc-remotevid" autoplay style="width: 320px; height: 240px; border: 1px solid black;"></video>
<button type="button" onclick="connect();">Connect</button>
<button type="button" onclick="hangUp();">Hang Up</button>
<p>Run a node.js server and adapt the address in the code.</p>
<script src="http://cdn.socket.io/stable/socket.io.js"></script>
<script>
// create socket
var socket = io.connect('localhost:1337/');
var sourcevid = document.getElementById('webrtc-sourcevid');
var remotevid = document.getElementById('webrtc-remotevid');
var localStream = null;
var peerConn = null;
var started = false;
var channelReady = false;
var mediaConstraints = {'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': true }};
var isVideoMuted = false;

// get the local video up
function startVideo() {
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
    window.URL = window.URL || window.webkitURL;
    navigator.getUserMedia({video: true, audio: true}, successCallback, errorCallback);

    function successCallback(stream) {
        localStream = stream;
        if (sourcevid.mozSrcObject) {
            sourcevid.mozSrcObject = stream;
            sourcevid.play();
        } else {
            try {
                sourcevid.src = window.URL.createObjectURL(stream);
                sourcevid.play();
            } catch (e) {
                console.log("Error setting video src: ", e);
            }
        }
    }
    function errorCallback(error) {
        console.error('An error occurred: [CODE ' + error.code + ']');
    }
}

// stop local video
function stopVideo() {
    if (sourcevid.mozSrcObject) {
        sourcevid.mozSrcObject.stop();
        sourcevid.src = null;
    } else {
        sourcevid.src = "";
        localStream.stop();
    }
}

// send SDP via socket connection
function setLocalAndSendMessage(sessionDescription) {
    peerConn.setLocalDescription(sessionDescription);
    console.log("Sending: SDP");
    console.log(sessionDescription);
    socket.json.send(sessionDescription);
}

function createOfferFailed() {
    console.log("Create Offer failed");
}

// start the connection upon user request
function connect() {
    if (!started && localStream && channelReady) {
        createPeerConnection();
        started = true;
        peerConn.createOffer(setLocalAndSendMessage, createOfferFailed, mediaConstraints);
    } else {
        alert("Local stream not running yet - try again.");
    }
}

// stop the connection upon user request
function hangUp() {
    console.log("Hang up.");
    socket.json.send({type: "bye"});
    stop();
}

function stop() {
    peerConn.close();
    peerConn = null;
    started = false;
}

// socket: channel connected
socket.on('connect', onChannelOpened)
      .on('message', onMessage);

function onChannelOpened(evt) {
    console.log('Channel opened.');
    channelReady = true;
}

function createAnswerFailed() {
    console.log("Create Answer failed");
}

// socket: accept connection request
function onMessage(evt) {
    if (evt.type === 'offer') {
        console.log("Received offer...");
        if (!started) {
            createPeerConnection();
            started = true;
        }
        console.log('Creating remote session description...');
        peerConn.setRemoteDescription(new RTCSessionDescription(evt));
        console.log('Sending answer...');
        peerConn.createAnswer(setLocalAndSendMessage, createAnswerFailed, mediaConstraints);
    } else if (evt.type === 'answer' && started) {
        console.log('Received answer...');
        console.log('Setting remote session description...');
        peerConn.setRemoteDescription(new RTCSessionDescription(evt));
    } else if (evt.type === 'candidate' && started) {
        console.log('Received ICE candidate...');
        var candidate = new RTCIceCandidate({sdpMLineIndex: evt.sdpMLineIndex, sdpMid: evt.sdpMid, candidate: evt.candidate});
        console.log(candidate);
        peerConn.addIceCandidate(candidate);
    } else if (evt.type === 'bye' && started) {
        console.log("Received bye");
        stop();
    }
}

function createPeerConnection() {
    console.log("Creating peer connection");
    var RTCPeerConnection = window.webkitRTCPeerConnection || window.mozRTCPeerConnection;
    var pc_config = {"iceServers": []};
    try {
        peerConn = new RTCPeerConnection(pc_config);
    } catch (e) {
        console.log("Failed to create PeerConnection, exception: " + e.message);
    }
    // send any ice candidates to the other peer
    peerConn.onicecandidate = function (evt) {
        if (evt.candidate) {
            console.log('Sending ICE candidate...');
            console.log(evt.candidate);
            socket.json.send({type: "candidate",
                              sdpMLineIndex: evt.candidate.sdpMLineIndex,
                              sdpMid: evt.candidate.sdpMid,
                              candidate: evt.candidate.candidate});
        } else {
            console.log("End of candidates.");
        }
    };
    console.log('Adding local stream...');
    peerConn.addStream(localStream);
    peerConn.addEventListener("addstream", onRemoteStreamAdded, false);
    peerConn.addEventListener("removestream", onRemoteStreamRemoved, false);

    // when the remote adds a stream, hand it to the local video element
    function onRemoteStreamAdded(event) {
        console.log("Added remote stream");
        remotevid.src = window.URL.createObjectURL(event.stream);
    }
    // when the remote removes a stream, remove it from the local video element
    function onRemoteStreamRemoved(event) {
        console.log("Remove remote stream");
        remotevid.src = "";
    }
}
</script>
</body>
</html>
And the JavaScript file (server.js) for the server (with the same port number as above):
// create the http server and listen on port 1337
var server = require('http').createServer();
var app = server.listen(1337, function() {
    console.log((new Date()) + " Server is listening on port 1337");
});

// create the socket server on the same port
var io = require('socket.io').listen(app);

// This callback function is called every time a socket
// tries to connect to the server
io.sockets.on('connection', function(socket) {
    console.log((new Date()) + ' Connection established.');

    // When a user sends an SDP message,
    // broadcast it to all users in the room
    socket.on('message', function(message) {
        console.log((new Date()) + ' Received Message, broadcasting: ' + message);
        socket.broadcast.emit('message', message);
    });

    // When the user hangs up,
    // broadcast a bye signal to all users in the room
    socket.on('disconnect', function() {
        // close user connection
        console.log((new Date()) + " Peer disconnected.");
        socket.broadcast.emit('user disconnected');
    });
});
I have node.js installed. Next I installed express and socket.io:
npm install express
npm install socket.io
I then run this file with node to start the server.
node server.js
Accessing index.html from the server gives me this error:
Uncaught TypeError: Object #<Object> has no method 'connect'
This is caused by the line var socket = io.connect('localhost:1337/'); in index.html.
I have searched this error and have tried putting the socket.io file on the server and linking it as <script src="/socket.io/socket.io.js"></script>; that doesn't change anything.
In index.html it has to be something like:
<script src="http://192.168.100.74:8080/socket.io/socket.io.js"></script>
And
var socket = io.connect('http://' + window.location.host + ':8080/');
My app.js:
var server = require('http').createServer();
var app = server.listen(8080);
var io = require('socket.io').listen(app);

io.sockets.on('connection', function(socket) {
    socket.on('message', function(message) {
        socket.broadcast.emit('message', message);
    });
});
Project
|- node_modules
|    |- socket.io
|- app.js
|- index.html
Hope this helps. I'm also new to this WebRTC and socket stuff.
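If you also want the same Node process to serve index.html, so that <script src="/socket.io/socket.io.js"></script> resolves against it, here is a minimal sketch using Express with the same socket.io 0.9-era .listen() API as above (the port and folder are assumptions):

// Serve index.html and socket.io from one process so the client can
// load /socket.io/socket.io.js from the same origin.
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io').listen(server);

app.use(express.static(__dirname)); // serves index.html from this folder

io.sockets.on('connection', function(socket) {
    socket.on('message', function(message) {
        socket.broadcast.emit('message', message);
    });
});

server.listen(8080);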
Using getUserMedia I can capture a video stream from the client's webcam/camera, and using the video tag I can show it in the client's browser. Code:
<video autoplay></video>
<script type="text/javascript">
    window.URL = window.URL || window.webkitURL;
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia || navigator.msGetUserMedia;

    var video = $('video')[0]; // assumes jQuery is loaded
    var failed = function(e) {
        console.log('Denied!', e);
    };

    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: true, audio: true}, function(stream) {
            video.src = window.URL.createObjectURL(stream);
        }, failed);
    } else {
        console.log('Not supported!');
    }
</script>
Now, is it possible to send this video stream to a server, either as a real-time feed or after the user has finished recording and decided to upload?
I found a few examples of:
sending binary images to a server over WebSocket
periodically capturing a frame of the streaming video and sending that as an image
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session.
<video autoplay></video>
<script language="javascript" type="text/javascript">
    function onVideoFail(e) {
        console.log('webcam fail!', e);
    }

    function hasGetUserMedia() {
        // Note: Opera is unprefixed.
        return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
                  navigator.mozGetUserMedia || navigator.msGetUserMedia);
    }

    if (hasGetUserMedia()) {
        // Good to go!
    } else {
        alert('getUserMedia() is not supported in your browser');
    }

    window.URL = window.URL || window.webkitURL;
    navigator.getUserMedia = navigator.getUserMedia ||
                             navigator.webkitGetUserMedia ||
                             navigator.mozGetUserMedia ||
                             navigator.msGetUserMedia;

    var video = document.querySelector('video');
    var streamRecorder;
    var webcamstream;

    if (navigator.getUserMedia) {
        navigator.getUserMedia({audio: true, video: true}, function(stream) {
            video.src = window.URL.createObjectURL(stream);
            webcamstream = stream;
            // streamRecorder = webcamstream.record();
        }, onVideoFail);
    } else {
        alert('failed');
    }

    function startRecording() {
        // stream.record() comes from an early draft of the spec linked
        // below; shipping browsers provide MediaRecorder instead (see
        // the note after the spec link).
        streamRecorder = webcamstream.record();
        setTimeout(stopRecording, 10000); // stop after 10 s
    }

    function stopRecording() {
        streamRecorder.getRecordedData(postVideoToServer);
    }

    function postVideoToServer(videoblob) {
        var data = {};
        data.video = videoblob;
        data.metadata = 'test metadata';
        data.action = "upload_video";
        jQuery.post("http://www.kongraju.in/uploadvideo.php", data, onUploadSuccess);
    }

    function onUploadSuccess() {
        alert('video uploaded');
    }
</script>

<div id="webcamcontrols">
    <button class="recordbutton" onclick="startRecording();">RECORD</button>
</div>
Spec:
http://www.w3.org/TR/mediastream-recording/
You can send the recorded file to the server.
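Note that stream.record() in the answer above is from an early draft of that spec and never shipped in browsers; what browsers implement is the MediaRecorder interface. A minimal sketch of the same flow with MediaRecorder (my substitution, reusing webcamstream and postVideoToServer from above):

// Record the getUserMedia stream for 10 s with MediaRecorder,
// then hand the resulting Blob to postVideoToServer().
var chunks = [];
var recorder = new MediaRecorder(webcamstream);

recorder.ondataavailable = function (e) {
    if (e.data && e.data.size > 0) {
        chunks.push(e.data);
    }
};
recorder.onstop = function () {
    postVideoToServer(new Blob(chunks, { type: 'video/webm' }));
};

recorder.start();
setTimeout(function () { recorder.stop(); }, 10000);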
Take a look at this article: http://www.smartjava.org/content/face-detection-using-html5-javascript-webrtc-websockets-jetty-and-javacvopencv
It shows a use of WebRTC:
These APIs should enable building applications that can be run inside a browser, requiring no extra downloads or plugins, that allow communication between parties using audio, video and supplementary real-time communication, without having to use intervening servers (unless needed for firewall traversal, or for providing intermediary services).