What is the best strategy for choosing a camera for QR code scanning?
Modern devices often have several back cameras. For example, the Huawei Mate 20 has four cameras (three physical and one virtual built on top of the physical ones).
Currently my algorithm just selects the first camera with "back" in its label.
Is there a better strategy for getting the most readable QR code image?
Here is my code:
this.qrScannerComponent.getMediaDevices().then(devices => {
  // this.info = devices.map((dev, i) => `${i}. ${dev.label}`).join('\n');

  // Keep only video inputs.
  const videoDevices: MediaDeviceInfo[] = devices.filter(
    device => device.kind.toString() === 'videoinput'
  );

  if (videoDevices.length > 0) {
    // Prefer the first device whose label mentions "back"; otherwise fall back to the first camera.
    const chosenDev = videoDevices.find(dev => dev.label.includes('back'));
    this.qrScannerComponent.chooseCamera.next(chosenDev || videoDevices[0]);
  }
});
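A possible refinement (not from the original post): device labels are often empty until camera permission has been granted and they depend on browser and language, so matching on "back" is fragile. Where the scanner component lets you supply your own constraints or stream, the standard facingMode constraint asks the browser for a rear-facing camera directly. The sketch below shows only that part; how the resulting stream is handed to qrScannerComponent is assumed, not taken from the post.

// Sketch: prefer the rear camera via the `facingMode` constraint instead of label matching.
async function getBackCameraStream(): Promise<MediaStream> {
  return navigator.mediaDevices.getUserMedia({
    audio: false,
    video: {
      facingMode: { ideal: 'environment' }, // rear-facing camera when available
      width: { ideal: 1280 },               // a modest resolution hint can help QR decoding
      height: { ideal: 720 }
    }
  });
}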
After I end a WebRTC call, nothing I do seems to remove the red icon on the browser tab that says the camera or microphone is in use.
I iterate over the tracks from videoElement.srcObject.getTracks() and call track.stop() on each one. I then remove the videoElement from the DOM, but I still have the red icon.
In my case, the problem was caused by a bug in my code due to my misunderstanding of WebRTC and getUserMedia(). I was actually calling getUserMedia() twice: once for the local <video> element and a second time for adding tracks to the RTCPeerConnection.
The fix was of course to call getUserMedia() only once and use the returned stream in both places.
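In other words, request the stream once and hand the same MediaStream to both consumers. A minimal sketch, using placeholder names pc and localVideo (they are not from the original code):

// Sketch: one getUserMedia() call whose stream feeds both the local preview and the peer connection.
async function startCall(pc: RTCPeerConnection, localVideo: HTMLVideoElement): Promise<MediaStream> {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  localVideo.srcObject = stream;                                    // local preview
  stream.getTracks().forEach(track => pc.addTrack(track, stream));  // same tracks to the RTCPeerConnection
  return stream;
}

// Stopping those same tracks (plus closing the connection) clears the "in use" indicator.
function endCall(pc: RTCPeerConnection, localVideo: HTMLVideoElement, stream: MediaStream): void {
  pc.close();
  stream.getTracks().forEach(track => track.stop());
  localVideo.srcObject = null;
}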
(BroadcastChannel cannot be used in a Stack Overflow snippet, so the sample code is provided on CodePen.)
(I confirmed it works in Chrome and Firefox.)
Open the link in two tabs and establish the WebRTC connection by clicking the Connect button in either tab; it switches to a Close button, and clicking Close releases the camera.
https://codepen.io/gtk2k/pen/NWxzgKo?editors=1111
// open this page in 2 tabs
const signalingChannel = new BroadcastChannel('signalingChannel');
let pc = null;
signalingChannel.onmessage = async evt => {
const msg = JSON.parse(evt.data);
if(msg.close) {
// The other tab hung up: close our peer connection and release the camera.
if(pc) { pc.close(); pc = null; }
releaseStream();
return;
}
if(!pc)
await setupPC();
if(msg.sdp) {
console.log(`Receive ${msg.type}`);
await pc.setRemoteDescription(msg);
if(msg.type === 'offer') {
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
sendSignaling(answer);
}
} else if(msg.candidate) {
console.log(`Receive candidate`);
await pc.addIceCandidate(msg);
}
}
async function setupPC(isCaller) {
pc = new RTCPeerConnection();
pc.onconnectionstatechange = evt => {
console.log(pc.connectionState);
if(pc.connectionState === 'disconnected')
{
releaseStream();
}
}
pc.onicecandidate = evt => {
if(evt.candidate)
sendSignaling(evt.candidate);
}
pc.ontrack = evt => {
vidRemote.srcObject = evt.streams[0];
}
// Reuse the local preview stream if it is still active; otherwise request the camera once.
const stream = vidLocal.srcObject || await navigator.mediaDevices.getUserMedia({video:true});
stream.getTracks().forEach(track => pc.addTrack(track, stream));
vidLocal.srcObject = stream;
if(isCaller) {
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
sendSignaling(offer);
}
}
// Show a local preview as soon as the page loads (the async IIFE must actually be invoked).
(async _ => {
const stream = await navigator.mediaDevices.getUserMedia({video:true});
vidLocal.srcObject = stream;
})();
btnConnect.onclick = evt => {
if(btnConnect.textContent === 'Connect') {
btnConnect.textContent = 'Close';
setupPC(true);
} else {
btnConnect.textContent = 'Connect';
pc.close();
pc = null;
releaseStream();
sendSignaling({close: true});
}
}
function sendSignaling(data) {
signalingChannel.postMessage(JSON.stringify(data));
}
function releaseStream() {
[vidLocal, vidRemote].forEach(vid => {
if(!vid.srcObject) return;
let stream = vid.srcObject;
vid.pause();
vid.srcObject = null;
stream.getTracks().forEach(track => track.stop());
stream = null;
});
}
video {
width: 360px;
height: 240px;
}
<button id="btnConnect">Connect</button>
<div>
<video id="vidLocal" muted autoplay></video>
<video id="vidRemote" muted autoplay></video>
</div>
This line: let X = this.appGlobal.GetNavigationLanguage().data;
returns JSON as shown below.
I want to get NAV.REPORTS.BMAIL.TITLE.
The translation code (NAV.REPORTS.BMAIL.TITLE) is created dynamically.
X.NAV.REPORTS.BMAIL.TITLE => works
X['NAV']['REPORTS']['BMAIL']['TITLE'] => works
But keep in mind the translation code is created dynamically, so I need something like this:
let transCode = 'NAV.REPORTS.BMAIL.TITLE';
console.log(X[transCode]);
How can I achieve this?
const test_data = {
  NAV: {
    REPORTS: {
      BMAIL: {
        TITLE: "hello"
      }
    }
  }
};

let transCode = 'NAV.REPORTS.BMAIL.TITLE';
const properties = transCode.split('.'); // --> ["NAV", "REPORTS", "BMAIL", "TITLE"]

let result = test_data;
properties.forEach(function(property) {
  result = result[property];
});

console.log(result); // --> hello
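As a follow-up (not part of the original answer), the same walk can be written as a one-liner with Array.prototype.reduce:

const get = (obj, path) => path.split('.').reduce((acc, key) => acc[key], obj);
console.log(get(test_data, 'NAV.REPORTS.BMAIL.TITLE')); // --> hello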
The short and evil route would be the following:
console.log(eval(`X.${transCode}`));
The less evil way is to use a recursive function call; this way you only walk as many levels as there are segments in your string path (rather than looping over the whole collection).
const X = {
NAV: {
REPORTS: {
BMAIL: {
TITLE: 'Test'
}
}
}
}
const transCode = 'NAV.REPORTS.BMAIL.TITLE';
// Evil...
console.log(eval(`X.${transCode}`)); // Test
// Less Evil (but needs exception handling)...
function getData(input: any, splitPath: string[]) {
const level = splitPath.pop();
if (splitPath.length === 0) {
return input[level];
} else {
return getData(input[level], splitPath);
}
}
const result = getData(X, transCode.split('.').reverse());
console.log(result); // Test
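On the exception handling mentioned above: if any level of the path may be missing, guarding each step (here with optional chaining) returns undefined instead of throwing. A small sketch, not from the original answer:

function getDataSafe(input: any, path: string): any {
  return path.split('.').reduce((acc, key) => acc?.[key], input);
}

console.log(getDataSafe(X, 'NAV.REPORTS.BMAIL.TITLE')); // Test
console.log(getDataSafe(X, 'NAV.MISSING.TITLE'));       // undefined, no exception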
I want to animate the react-native-maps (Google) markers.
I tried with the Animated module, but the markers do not allow complex styles.
Is there any function to modify the marker's coordinates and animate it, like:
marker.setAnimation(google.maps.Animation.BOUNCE);
I have tried with:
<MapView.Marker.Animated>
But I cannot achieve the effect. Is there a function that edits the coordinates as an animation (like a drop)?
The React Native Maps marker is not animated by default; it cannot accept GIF images, sprites, the Animation API, and so on. However, I was able to animate it the hard way, through image transitions. Here is my example:
constructor(props, context) {
super(props, context);
this.state = {
startTransition: 1,
endTransition: 4,
};
}
componentDidMount() {
this.animate();
}
animate() {
  const {startTransition, endTransition} = this.state;
  if (startTransition < endTransition) {
    this.setState({startTransition: startTransition + 1});
  } else {
    this.setState({startTransition: 1});
  }
  // Advance to the next frame every 500 ms.
  this.animationTimer = setTimeout(() => {
    this.animate();
  }, 500);
}

componentWillUnmount() {
  // Stop the frame timer so we don't set state on an unmounted component.
  clearTimeout(this.animationTimer);
}
renderImg(imgTrans) {
if(imgTrans === 1) {
return require('./walk1.png');
}
if(imgTrans === 2) {
return require('./walk2.png');
}
if(imgTrans === 3) {
return require('./walk3.png');
}
if(imgTrans === 4) {
return require('./walk4.png');
}
}
render() {
  const {startTransition} = this.state;
  return (
    <MapView.Marker
      coordinate={tapCoords}
      image={this.renderImg(startTransition)}
    />
  );
}
This is how I did the animation for now.
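For animating the marker's position specifically (rather than its image), react-native-maps also ships Marker.Animated together with an AnimatedRegion coordinate. The following is only a rough sketch based on the library's documented example, not the approach above, so verify it against the version you use:

import { AnimatedRegion } from 'react-native-maps';

// Keep the coordinate as an AnimatedRegion instead of a plain {latitude, longitude} object.
const coordinate = new AnimatedRegion({
  latitude: 37.78825,
  longitude: -122.4324,
  latitudeDelta: 0,
  longitudeDelta: 0,
});

// Slide the marker to a new position over 500 ms.
function moveMarker(latitude, longitude) {
  coordinate.timing({ latitude, longitude, duration: 500, useNativeDriver: false }).start();
}

// In render():
// <MapView.Marker.Animated coordinate={coordinate} />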
Has anybody tried HTML5 video for capturing an image from the camera on mobile and had problems with a blurry image? I tried it on a Samsung phone, where the image was sharp, and on an LG G4, where the image was blurry (Android browser and Chrome on both phones).
Does anybody have this experience or an explanation? Can some phones have this problem?
Here is the snippet of code I use:
function startCamera(newSource) {
if (newSource) {
var constraints = {
audio: false,
video: {
optional: [
{ sourceId: newSource },
]
}
};
addVideoAndCanvas();
navigator.getUserMedia(constraints, onMediaSuccess, onMediaError);
}
}
function addVideoAndCanvas() {
self.video = $('<video muted autoplay>');
self.canvas = $('<canvas id="qr-canvas" class="hide">');
$('.modal-body .video-container', self.root).append(self.video).append(self.canvas);
}
function onMediaSuccess(stream) {
self.stream = stream;
self.video[0].src = (window.URL && window.URL.createObjectURL(stream)) || stream;
self.video[0].onloadeddata = function () {
self.canvas[0].height = self.video[0].videoHeight;
self.canvas[0].width = self.video[0].videoWidth;
scan();
}
}
function onMediaError(error) {
}
function scan() {
if (self.stream) {
try {
var ctx = self.canvas[0].getContext('2d');
ctx.drawImage(self.video[0], 0, 0);
} catch (e) {
setTimeout(scan, 20);
}
} else {
setTimeout(scan, 20);
}
}
Probably waiting on the following spec: https://w3c.github.io/mediacapture-image/index.html#FocusMode
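Where browsers have since implemented that spec, continuous autofocus can be requested through the track's capabilities and constraints. A hedged sketch (support varies by device and browser, so feature-detect first):

// Sketch: ask for continuous autofocus if the camera/browser exposes a focusMode capability.
async function enableContinuousFocus(stream) {
  const [track] = stream.getVideoTracks();
  const capabilities = track.getCapabilities ? track.getCapabilities() : {};
  if (Array.isArray(capabilities.focusMode) && capabilities.focusMode.includes('continuous')) {
    await track.applyConstraints({ advanced: [{ focusMode: 'continuous' }] });
  }
}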
I'm using a background file transfer and the background audio player.
In the TransferStatusChanged event I save the file to isolated storage, then play it with the audio player.
It works fine while the application is active; I want to do the same when the application is not active.
In WP8.0 this isn't possible, as your TransferStatusChanged handler runs in your app process, which is stopped when you navigate away from it:
When the user navigates forward, away from an app, after the Deactivated event is raised, the operating system will attempt to put the app into a dormant state. In this state, all of the application’s threads are stopped and no processing takes place, but the application remains intact in memory.
You can make it work under the lock screen by disabling IdleDetection, but it won't work when your app is put into the dormant/tombstoned state.
You may consider starting playback when you activate the app, or putting some logic in the BAP agent that checks, for example on TrackChange, whether the file has been downloaded and then plays it.
Edit
Thanks to @Romasz's suggestion to play an empty track while the file finishes downloading:
in TrackChange I check whether the file has been downloaded and remove it from the download queue (this can be done in the background audio agent). Things work fine now. Here is some code:
private AudioTrack GetNextTrack(bool isStart = false)
{
AudioTrack track = null;
using (IsolatedStorageFile ISF = IsolatedStorageFile.GetUserStoreForApplication())
{
if (isStart)
{
currentSongIndex = -1;
}
currentSongIndex++;
if (currentSongIndex == _ProgressList.Count())
{
//currentSongIndex = 0;
return track;
}
if (ISF.FileExists("shared/transfers/" + _ProgressList[currentSongIndex].FileName))
{
var request = requests.Where(it => it.Key == _ProgressList[currentSongIndex].FileName).FirstOrDefault().Value;
if (request != null)
{
bool isDone = ProcessTransfer(request, _ProgressList[currentSongIndex].Directory);
if(!isDone )
{
if(request.BytesReceived > LastDownloadedSize)
{
NumberOfTrialsToLoadNextTrack = 0;
LastDownloadedSize = request.BytesReceived;
}
else
{
++NumberOfTrialsToLoadNextTrack;
}
}
}
}
else
{
++NumberOfTrialsToLoadNextTrack;
}
if (ISF.FileExists(_ProgressList[currentSongIndex].Directory+_ProgressList[currentSongIndex].FileName))
{
track = playList[currentSongIndex];
NumberOfTrialsToLoadNextTrack = 0;
LastDownloadedSize = 0;
}
else
{
currentSongIndex--;
if (NumberOfTrialsToLoadNextTrack < 10)
{
track = new AudioTrack(new Uri("halfsec.mp3", UriKind.Relative),
"empty",
"empty",
"empty",
null);
}
}
}
return track;
}
private bool ProcessTransfer(BackgroundTransferRequest transfer, string directory = "")
{
bool isDone = false;
switch (transfer.TransferStatus)
{
case TransferStatus.Completed:
if (transfer.StatusCode == 200 || transfer.StatusCode == 206)
{
RemoveTransferRequest(transfer.RequestId);
using (IsolatedStorageFile isoStore = IsolatedStorageFile.GetUserStoreForApplication())
{
string filename = transfer.Tag;
System.Diagnostics.Debug.WriteLine(directory + filename);
if (isoStore.FileExists(directory + filename))
{
isoStore.DeleteFile(directory + filename);
}
if (isoStore.FileExists(transfer.DownloadLocation.OriginalString))
{
isoStore.MoveFile(transfer.DownloadLocation.OriginalString, directory + filename);
}
}
isDone = true;
}
else
{
RemoveTransferRequest(transfer.RequestId);
if (transfer.TransferError != null)
{
}
}
break;
}
return isDone;
// NotifyComplete();
}
private void RemoveTransferRequest(string transferID)
{
// Use Find to retrieve the transfer request with the specified ID.
BackgroundTransferRequest transferToRemove = BackgroundTransferService.Find(transferID);
// try to remove the transfer from the background transfer service.
try
{
BackgroundTransferService.Remove(transferToRemove);
}
catch (Exception)
{
}
}
protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
{
switch (playState)
{
case PlayState.TrackEnded:
player.Track = GetNextTrack();
break;
case PlayState.TrackReady:
player.Play();
break;
case PlayState.Shutdown:
// TODO: Handle the shutdown state here (e.g. save state)
break;
case PlayState.Unknown:
break;
case PlayState.Stopped:
break;
case PlayState.Paused:
break;
case PlayState.Playing:
break;
case PlayState.BufferingStarted:
break;
case PlayState.BufferingStopped:
break;
case PlayState.Rewinding:
break;
case PlayState.FastForwarding:
break;
}
NotifyComplete();
}
This code is in the AudioPlayer class; the files are added to the download queue in the .xaml.cs as usual.