Embed YouTube on a webpage to be viewed from a Smart TV - html

Simple question...
I am making a small webpage that is meant to be viewed on a Smart TV. The page makes some Ajax calls, etc.
But the site also shows a YouTube video, and the video is served as Flash content. The Smart TV doesn't play Flash, so I'm in trouble.
Does anyone have any idea how to handle this?

Yes, I wanted to do the exact same thing. This is how I implemented the logic, and it is working fine for me.
HTML:
<div id="webElement" style="display: none;"></div>
JS:
When the Ajax response is received (it can contain many files of type YouTube/web page/MP4):
if (Utility.isYouTube(dr.FileName)) {
    dr.IsWeb = true;
    dr.WebUrl = "http://www.youtube.com/embed/" + Utility.getParameterByName('v', dr.FileName) + "?rel=0&autoplay=1";
}
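The Utility helpers above are not shown in the answer; a minimal sketch of what they might look like (an assumption, not the author's code):
var Utility = {
    // crude check: does the file name look like a YouTube watch URL?
    isYouTube: function (fileName) {
        return /youtube\.com\/watch/i.test(fileName) || /youtu\.be\//i.test(fileName);
    },
    // extract a query-string parameter (e.g. 'v') from a URL
    getParameterByName: function (name, url) {
        var match = new RegExp('[?&]' + name + '=([^&#]*)').exec(url);
        return match ? decodeURIComponent(match[1].replace(/\+/g, ' ')) : '';
    }
};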
This is how I played it (if the file is a web page or a YouTube video):
var webElement = jQuery("#webElement");
webElement.empty();
var obj = "<object type='text/html' width='1920' height='1080' data='" + dr.WebUrl + "' onerror='Player.onWebElementError();'> </object>";
webElement.append(obj);
webElement.show();
Hope this helps you achieve what you want.
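If the TV's browser handles iframes, the same /embed/ URL can also be used with YouTube's standard iframe embed (VIDEO_ID is a placeholder):
<iframe width="1920" height="1080"
        src="http://www.youtube.com/embed/VIDEO_ID?rel=0&autoplay=1"
        frameborder="0" allowfullscreen></iframe>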

Samsung Smart TVs do use Flash. It depends on which TV models you want your app to run on. If you are developing for the 2011 models it will be a no-go, since the 2011 Smart TV models only support ActionScript 2.0 and YouTube has stopped supporting AS2.0 in its API.
For the 2012 and 2013 models you can easily embed a YouTube video by doing:
var attributes = { id: "myytplayer" },
    params = { allowScriptAccess: "always", allowFullScreen: "true" };
swfobject.embedSWF('http://www.youtube.com/apiplayer?enablejsapi=1&version=2&fs=1', "ytplayer", 400, 600, "6", null, null, params, attributes);
Then, to load a video, define a small loader around the player reference and call it with a watch URL:
ytp.getVideo = function (url) {
    // loads a video by its watch URL
    ytp.ytp_ref.cueVideoByUrl(url);
};
ytp.getVideo('http://www.youtube.com/watch?v=ubAjEjkJIJo');
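The snippet assumes a ytp object holding a ytp_ref to the embedded player, which is never set up in the answer. One way to wire it, sketched against the onYouTubePlayerReady callback that YouTube's Flash player API invokes (the ytp object itself is illustrative):
var ytp = {};
// YouTube's AS3 player calls this global function once the SWF is ready.
function onYouTubePlayerReady(playerId) {
    // swfobject replaced the "ytplayer" element with an <object> whose id
    // comes from the attributes passed in ({ id: "myytplayer" }).
    ytp.ytp_ref = document.getElementById("myytplayer");
}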

Related

Phaser HTML5 app cannot play sound after porting by Phonegap Cloud Build

This is a simple Phaser audio example. It works well in my Android web browser. However, it is muted after porting to an Android app with Phonegap Cloud Build.
I know how to play (and loop) a sound in a Phonegap app (How to loop a audio in phonegap?) but I don't know how to apply that inside the Phaser JS framework.
Here is the ported app. I can install and run it, but there is no sound. Am I missing something, or does Phonegap Cloud Build not support the WebAudio used by Phaser JS?
https://build.phonegap.com/apps/1783695/
My config.xml is:
<?xml version="1.0" encoding="UTF-8" ?>
<widget id="com.phaser.phasersound" version="1.0.0" xmlns="http://www.w3.org/ns/widgets" xmlns:gap="http://phonegap.com/ns/1.0">
    <name>Phaser sound complete</name>
    <description>
        Phaser sound phonegap
    </description>
    <gap:plugin name="org.apache.cordova.media" />
    <icon src="icon.png" />
    <preference name="splash-screen-duration" value="1"/>
    <!--
        If you do not want any permissions to be added to your app, add the
        following tag to your config.xml; you will still have the INTERNET
        permission on your app, which PhoneGap requires.
    -->
    <preference name="permissions" value="none"/>
</widget>
The source code is below. (I changed the audio files from local paths to GitHub links so they run in the code snippet.)
var game = new Phaser.Game(600, 800, Phaser.AUTO, 'phaser-example', { preload: preload, create: create });

function preload() {
    game.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL;
    // have the game centered horizontally
    game.scale.pageAlignHorizontally = true;
    game.scale.pageAlignVertically = true;
    game.stage.backgroundColor = '#414040';

    // I changed the local audio files from local to github links to run on code snippet
    /*
    game.load.audio('explosion', 'assets/audio/SoundEffects/explosion.mp3');
    game.load.audio('sword', 'assets/audio/SoundEffects/sword.mp3');
    game.load.audio('blaster', 'assets/audio/SoundEffects/blaster.mp3');
    */
    game.load.audio('explosion', 'https://raw.githubusercontent.com/nguoianphu/phaser-sound-complete-phonegap/master/www/assets/audio/SoundEffects/explosion.mp3');
    game.load.audio('sword', 'https://raw.githubusercontent.com/nguoianphu/phaser-sound-complete-phonegap/master/www/assets/audio/SoundEffects/sword.mp3');
    game.load.audio('blaster', 'https://raw.githubusercontent.com/nguoianphu/phaser-sound-complete-phonegap/master/www/assets/audio/SoundEffects/blaster.mp3');
}

var explosion;
var sword;
var blaster;
var text;
var text1;
var text2;
var text3;

function create() {
    var style = { font: "65px Arial", fill: "#52bace", align: "center" };
    text = game.add.text(game.world.centerX, 100, "decoding", style);
    text.anchor.set(0.5);

    explosion = game.add.audio('explosion');
    sword = game.add.audio('sword');
    blaster = game.add.audio('blaster');

    // Being mp3 files these take time to decode, so we can't play them instantly
    // Using setDecodedCallback we can be notified when they're ALL ready for use.
    // The audio files could decode in ANY order, we can never be sure which it'll be.
    game.sound.setDecodedCallback([explosion, sword, blaster], start, this);
}

var keys;

function start() {
    text.text = 'Press 1, 2 or 3';

    var style = { font: "48px Arial", fill: "#cdba52", align: "center" };
    text1 = game.add.text(game.world.centerX, 250, "Blaster: Stopped", style);
    text1.anchor.set(0.5);
    text2 = game.add.text(game.world.centerX, 350, "Explosion: Stopped", style);
    text2.anchor.set(0.5);
    text3 = game.add.text(game.world.centerX, 450, "Sword: Stopped", style);
    text3.anchor.set(0.5);

    explosion.onStop.add(soundStopped, this);
    sword.onStop.add(soundStopped, this);
    blaster.onStop.add(soundStopped, this);

    keys = game.input.keyboard.addKeys({ blaster: Phaser.Keyboard.ONE, explosion: Phaser.Keyboard.TWO, sword: Phaser.Keyboard.THREE });
    keys.blaster.onDown.add(playFx, this);
    keys.explosion.onDown.add(playFx, this);
    keys.sword.onDown.add(playFx, this);

    // And for touch devices you can also press the top, middle or bottom of the screen
    game.input.onDown.add(onTouch, this);
}

function onTouch(pointer) {
    var b = game.height / 3;

    if (pointer.y < b)
    {
        playFx(keys.blaster);
    }
    else if (pointer.y > b * 2)
    {
        playFx(keys.sword);
    }
    else
    {
        playFx(keys.explosion);
    }
}

function playFx(key) {
    switch (key.keyCode)
    {
        case Phaser.Keyboard.ONE:
            text1.text = "Blaster: Playing";
            blaster.play();
            break;
        case Phaser.Keyboard.TWO:
            text2.text = "Explosion: Playing";
            explosion.play();
            break;
        case Phaser.Keyboard.THREE:
            text3.text = "Sword: Playing";
            sword.play();
            break;
    }
}

function soundStopped(sound) {
    if (sound === blaster)
    {
        text1.text = "Blaster: Complete";
    }
    else if (sound === explosion)
    {
        text2.text = "Explosion: Complete";
    }
    else if (sound === sword)
    {
        text3.text = "Sword: Complete";
    }
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/phaser/2.4.4/phaser.js"></script>
UPDATE 2015-12-01
Here is my completed source code. It has both .mp3 and .ogg sound files. You can play them in the Android native browser (tested on a Samsung E5 running Android 4.4.4).
Source: https://github.com/nguoianphu/phaser-sound-complete-phonegap
Here is the ported app on Phonegap Build. It can display the screen but can't play sounds.
https://build.phonegap.com/apps/1783695/builds
You are trying to play the audio with the webview library. It is likely using the HTML5 audio API or Web Audio. If it is neither of these, then you need to ask the author.
Next, it is not best practice to load from an external source (http:). Your assets (JavaScript, CSS, audio files, etc.) should live on the device. If you load files from the web, the sound quality could be poor, or the audio may not play at all (see the whitelist notes below). Load from the device.
Android 4.4.4 is KitKat. The standard webview library was exchanged for the Chromium version. This means your audio library might be confused by the change, or you may need to make it aware of it. It also means your code may not work on devices before 4.4.4 (mostly because you cannot test it).
The link you point to is likely using the core media plugin, even though they don't say so. In addition, the post is over three years old, and many things have changed since then. NOTE: you have installed the media plugin in your config.xml; this is likely why your loop works.
You should start over; you've made many errors. In addition to all you have, you will need to implement the whitelist plugin (if you are going to import files or talk to the network), for example as sketched below.
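A minimal sketch of what that could look like in a Phonegap Build config.xml, assuming the cordova-plugin-whitelist plugin; the wide-open origins are illustrative and should be tightened for production:
<gap:plugin name="cordova-plugin-whitelist" source="npm" />
<access origin="*" />
<allow-navigation href="*" />
<allow-intent href="*" />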
FIRST, TRY this sample app - the example plays on Android and iOS. You can download the Android version and test it. The iOS version requires that I have your UUID compiled in.
There are 16 audio plugins you can choose from. I know a few do real-time audio playback and offer better control than the "core" plugin.
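Purely as an illustration, a minimal sketch of playing a bundled file with the core media plugin already named in the config.xml above (the android_asset path is an assumption about where Phonegap Build places www/):
// Hypothetical sketch: play a bundled sound with the core Media plugin.
document.addEventListener('deviceready', function () {
    var sword = new Media('/android_asset/www/assets/audio/SoundEffects/sword.mp3',
        function () { console.log('playback finished'); },
        function (err) { console.log('playback error: ' + JSON.stringify(err)); });
    sword.play();
}, false);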
You should read:
Top Mistakes by Developers new to Cordova/Phonegap - read the bold sentences.
HOW TO apply the Cordova/Phonegap whitelist system
HOWTO Core Plugins Setup
Phonegap--Generic-Boilerplate7 - just wrote this. It works.
Phonegap Demo Apps
Phonegap-Media-Test - source code for the example that plays on Android and iOS. You can download the Android version and test it.
UPDATE: 2015-12-01 - 2am
Previously, I had forgotten to add a wildcard (*) to the CSP meta tag. I am now including it. This meta tag should be added to the header of the index.html file that is playing the audio.
NOTE: YOUR APP IS NOW INSECURE. IT IS UP TO YOU TO SECURE YOUR APP.
<meta http-equiv="Content-Security-Policy"
      content="default-src *;
               style-src * 'self' 'unsafe-inline' 'unsafe-eval';
               script-src * 'self' 'unsafe-inline' 'unsafe-eval';">
UPDATE: 2015-12-01 - 3pm
@Tuan, I've applied all the fixes as outlined in:
HOW TO apply the Cordova/Phonegap whitelist system
HOWTO Core Plugins Setup
Phonegap--Generic-Boilerplate7 - just wrote this. It works.
The audio is now working on my LG Leon (Android 5.1.1).
Truthfully, I would never have done this on my own, but your code had enough working that after I tested it in my Firefox (v34) browser, I was fairly certain it would work.
UPDATE: 2016-04-15
The code has been removed. Ask in the comments if you need code.
There should be enough code in place for you to work from:
- Code
- Working Android App
- Phonegap Build Documentation

Can I use a local file as a source in a live page?

I like to use JSFiddle when designing a new interface because I find its various built-in tools convenient. I'm working on the front end of a site where I want to use a video, and unlike an image, I can't just throw it up on imgur and link to it for free instant hosting while I fiddle with the interface design.
So I want to know whether I can somehow use a local file on my PC as the source for an HTML video element hosted on a live site. Obviously this is trivial with a web project sitting on my desktop, but I'm not sure it can be done on a live test.
For example this would work on a page I open from my desktop, living on my PC:
<video id="Video-Player">
    <source src="../movie.mp4" type="video/mp4"/>
</video>
But I don't know whether I can do the equivalent with a page living on the web.
Here's how to allow a user to select an image from their local machine. This should get you started in the right direction.
Add a file input button in the HTML
<input type="file" id="file-btn"/>
and the corresponding handler
document.getElementById('file-btn').addEventListener('change', function(e){
    readFiles(e.target.files);
});
Then the code to read the files
function readFiles(files){
    files = [].slice.call(files); // turn the FileList into a normal array
    for (var file of files){
        var reader = new FileReader();
        reader.onload = createOnLoadHandler(file);
        // there are also reader.onerror, reader.onloadstart, reader.onprogress, and reader.onloadend handlers
        reader.readAsDataURL(file);
    }
}
Now, I've only done this with images, but this is how I read the image data.
function createOnLoadHandler(file){
    console.log('reading ' + file.name + ' of type ' + file.type);
    function onLoad(e){
        var data = e.target.result;
        display(data);
    }
    return onLoad;
}
// Assumes a canvas element plus WIDTH/HEIGHT constants defined elsewhere on the page.
function display(data){
    var img = document.createElement('img');
    // Wait for the data URL to decode before drawing, or the canvas may stay blank.
    img.onload = function(){
        var context = canvas.getContext('2d');
        context.clearRect(0, 0, WIDTH, HEIGHT);
        context.drawImage(img, 0, 0, WIDTH, HEIGHT);
    };
    img.src = data;
}
Here is a demo of the above code.
As a side note, if you try to read images from another domain you'll run into cross-origin policy issues. I would expect the same problem exists for videos as well.
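For the video case in the question, a minimal sketch along the same lines: instead of a data URL, point the video element at an object URL for the chosen file (this assumes the file input above and the Video-Player element from the question):
// Hypothetical adaptation for video: an object URL avoids base64 overhead.
document.getElementById('file-btn').addEventListener('change', function (e) {
    var file = e.target.files[0];
    if (!file) { return; }
    var video = document.getElementById('Video-Player');
    video.src = URL.createObjectURL(file); // free it later with URL.revokeObjectURL
    video.play();
});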

Vimeo force CC language

I'm trying to embed a Vimeo video in my website, and I have added about five different languages to the video's CC on Vimeo. However, I don't want the user to have to change their language in the CC drop-down of the Vimeo embed. I would like to assign it in HTML/JavaScript (using geolocation to pick their base language); they can then change the CC language once the video has started playing.
You can use the enableTextTrack function on a player initialized by the JS API provided by Vimeo:
// Select the iframe with the DOM API
var iframe = document.querySelector('iframe');
var player = new Vimeo.Player(iframe);
player.enableTextTrack('en').then(function(track) {
    // track.language = the ISO code for the language
    // track.kind = 'captions' or 'subtitles'
    // track.label = the human-readable label
}).catch(function(error) {
    switch (error.name) {
        case 'InvalidTrackLanguageError':
            // no track was available with the specified language
            break;
        case 'InvalidTrackError':
            // no track was available with the specified language and kind
            break;
        default:
            // some other error occurred
            break;
    }
});
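The snippet assumes a Vimeo iframe embed and the player.js library are already on the page, along the lines of (VIDEO_ID is a placeholder):
<iframe src="https://player.vimeo.com/video/VIDEO_ID" width="640" height="360" frameborder="0" allowfullscreen></iframe>
<script src="https://player.vimeo.com/api/player.js"></script>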
More information is available on the GitHub page for the Vimeo player JS API: https://github.com/vimeo/player.js#enabletexttracklanguage-string-kind-string-promiseobject-invalidtracklanguageerrorinvalidtrackerrorerror
We don't have this yet, but we do plan on offering some way to do it with an embed parameter and through the JavaScript API in the future.

Nodejs + Firefox behavior in relation to the HTML5 <audio> element

I am using Node.js and Firefox to display a web page. The page has an HTML5 audio element. The problem I have is related to the calculation of the audio duration.
In my node js script I have:
var app = require('http').createServer(handler)
, path = require("path")
, io = require('socket.io').listen(app)
, fs = require('fs')
, exec = require('child_process').exec
, spawn = require ('child_process').spawn
, util = require('util')
, extensions = {
".html": "text/html",
".css": "text/css",
".js": "application/javascript",
".png": "image/png",
".gif": "image/gif",
".ttf": "application/x-font-ttf",
".jpg": "image/jpeg",
".mp3": "audio/mp3",
".wav": "audio/wav",
".ogg": "audio/ogg"
}
, Files = {};
In my html web page I have:
<audio id="idaudio" src="" type="audio/wav" >Your browser does not support the audio element.</audio>
And some javascript code from my web page:
var thissound = document.getElementById("idaudio");
thissound.src = "http://localhost/Audio/Song.wav";
//thissound.src = "/Audio/Song.wav";
thissound.addEventListener('loadeddata', function() {
    var durationaudio = (thissound.duration) * 1000;
});
When I check durationaudio I get the right number, and then I can play the song using thissound.play(). This code works in Firefox and Chromium.
If I change
thissound.src = "http://localhost/Audio/Song.wav" -> thissound.src = "/Audio/Song.wav"
and add the ".wav": "audio/wav" extension in the Node script, I can play the song in Firefox and Chromium. In Chromium I also get the right durationaudio, but in Firefox I get durationaudio = Infinity. This is the problem: I don't know why Firefox is not able to get the right duration. Maybe I have to add something to the Node script to allow Firefox to get the duration of the audio. Any ideas?
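One possible cause, offered only as an assumption since the thread has no confirmed answer: Firefox reports Infinity when it cannot determine the resource's length, for example when the response lacks a Content-Length header. A minimal handler sketch that sends one, reusing path, fs, and the extensions map above:
// Illustrative sketch, not the asker's handler: serve files with an explicit
// Content-Length so the browser can compute the audio duration.
function handler(req, res) {
    var filePath = '.' + req.url;            // e.g. ./Audio/Song.wav
    var ext = path.extname(filePath);
    fs.stat(filePath, function (err, stats) {
        if (err) {
            res.writeHead(404);
            return res.end();
        }
        res.writeHead(200, {
            'Content-Type': extensions[ext] || 'application/octet-stream',
            'Content-Length': stats.size
        });
        fs.createReadStream(filePath).pipe(res);
    });
}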

Using html5 to capture microphone input on mobile chrome

I am trying to record from the microphone using HTML5 in Chrome 29 Beta for Android (Web Audio support was enabled in beta 29). In the code below, ProcessAudio is the Web Audio filter function in which I receive the input buffer from the microphone. I get the correct sample size; however, the audio PCM samples in mobile Chrome are always zero. Does Chrome disable the input to the audio filters in its mobile version? Has anybody gotten audio recording working using HTML5 in the mobile version of Chrome?
The following code works fine in Chrome (desktop version).
<!DOCTYPE HTML>
<html>
<head>
<script>
function GetMedia(obj, fnSuccess, fnFailure)
{
    if (navigator.getUserMedia)
    {
        return navigator.getUserMedia(obj, fnSuccess, fnFailure);
    }
    else if (navigator.webkitGetUserMedia)
    {
        return navigator.webkitGetUserMedia(obj, fnSuccess, fnFailure);
    }
    else if (navigator.mozGetUserMedia)
    {
        return navigator.mozGetUserMedia(obj, fnSuccess, fnFailure);
    }
    else if (navigator.msGetUserMedia)
    {
        return navigator.msGetUserMedia(obj, fnSuccess, fnFailure);
    }
    alert("no audio capture");
}

var incrementer = 0;
function ProcessAudio(e)
{
    var inputBuffer = e.inputBuffer.getChannelData(0);
    var outputBuffer = e.outputBuffer.getChannelData(0);
    outputBuffer.set(inputBuffer, 0);
    document.getElementById("display").innerText =
        incrementer + " " + inputBuffer[0];
    incrementer++;
}

var context = null;
function Success(localMediaStream)
{
    context = new window.webkitAudioContext();
    var microphone = context.createMediaStreamSource(localMediaStream);
    var node = context.createScriptProcessor(4096, 1, 1);
    node.onaudioprocess = ProcessAudio;
    microphone.connect(node);
    node.connect(context.destination);
}

function Error(err)
{
    alert("no audio support");
}

function load(e)
{
    GetMedia({ audio: true, video: false }, Success, Error);
}
</script>
</head>
<body onload="load(event)">
    <div>Audio Player</div>
    <div id="display"></div>
    <video id="localvideo" autoplay="autoplay" style="opacity:1"></video>
</body>
</html>
We now have Web Audio input working in Chrome for Android Beta (31.0.1650.11).
There's an issue with inputBuffer in Chrome Beta for Android right now - it always contains zeros (at least on the devices I tested), as you mentioned - confirmed.
That isn't much help, but for demo purposes the best I could achieve right now is playing the "live" input stream via an <audio> object. See here: hit "record" and then start playing the audio object. The rest beyond that is TARFU in the current beta. I guess I'm waiting for the G-folks to patch it asap.
What I found out, though, is that zingaya.com works fine (that is, it records your audio and plays it back to you) in Chrome Beta for Android. Just try it out and it will play back your stream. Apparently this is done by leveraging WebRTC. In other words, you evidently can record an audio stream that way; I just haven't figured out how yet. Post it up if you come up with something.
The G-guys are working fast, on the other hand - they released a new beta less than a week ago that can at least play audio fetched with XHR; the version before that couldn't even do that.
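A minimal sketch of that "play the live input via an <audio> object" workaround; the srcObject/createObjectURL split is an assumption to cover both older and newer browsers:
// Illustrative only: route the getUserMedia stream into an <audio> element
// instead of a ScriptProcessor node.
function PlayLive(localMediaStream) {
    var audio = document.createElement('audio');
    audio.autoplay = true;
    if ('srcObject' in audio) {
        audio.srcObject = localMediaStream;                        // modern browsers
    } else {
        audio.src = window.URL.createObjectURL(localMediaStream);  // older WebKit
    }
    document.body.appendChild(audio);
}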