I am getting pretty weird behavior using HTML 5 drag and drop functionality in Safari.
My code looks something like this, nothing fancy:
// Validate a dropped/selected file and hand it off to uploadFile().
//   files: FileList from the drop/change event (only the first entry is used)
//   e:     the originating event (unused here)
// Returns false after alerting when validation fails.
var handleFiles = function (files, e) {
    // Anchored pattern: only MIME types that actually START with "image/"
    // pass (the original /image.*/ matched "image" anywhere in the type).
    var imageType = /^image\//;
    var file = files[0];
    // check file type
    if (!file.type.match(imageType)) {
        alert("File \"" + file.name + "\" is not a valid image file. ");
        return false;
    }
    // check file size: size is in bytes, limit is ~2 MB expressed in KiB.
    // Math.floor instead of parseInt: parseInt coerces the number through a
    // string and misparses large values rendered in exponential notation.
    if (Math.floor(file.size / 1024) > 2050) {
        alert("File \"" + file.name + "\" is too big.");
        return false;
    }
    uploadFile(file);
};
// Upload a single File to fileuploadhandler.ext via XHR2, reporting
// progress into #progress / .percents / .up-done as it goes.
var uploadFile = function (file) {
    // Local (the original leaked `xhr` into the global scope).
    var xhr = new XMLHttpRequest();
    xhr.open("post", "fileuploadhandler.ext", true);
    // Progress: update the bar width, percentage label and KiB counter.
    xhr.upload.addEventListener("progress", function (event) {
        if (event.lengthComputable) {
            var pct = (event.loaded / event.total) * 100;
            $("#progress").css("width", pct + "%");
            $(".percents").html(" " + pct.toFixed() + "%");
            $(".up-done").html(Math.floor(event.loaded / 1024).toFixed(0));
        } else {
            alert("Failed to compute file upload length");
        }
    }, false);
    xhr.onreadystatechange = function (oEvent) {
        if (xhr.readyState === 4) {
            if (xhr.status === 200) {
                logAction(oEvent);
                // Force the UI to its final state in case the last
                // progress event never fired.
                $("#progress").css("width", "100%");
                $(".percents").html("100%");
                $(".up-done").html(Math.floor(file.size / 1024).toFixed(0));
                fileUploadedComplete(true, oEvent);
            } else {
                fileUploadedComplete(false, oEvent);
                alert("Error: " + xhr.statusText);
            }
        }
    };
    // Set headers. Do NOT claim multipart/form-data here: send() is given
    // the raw File, not a multipart payload, and a multipart Content-Type
    // without a boundary makes browsers (notably Safari) misbehave.
    xhr.setRequestHeader("Content-Type", file.type);
    xhr.setRequestHeader("X-File-Name", file.name);
    // file.fileSize is non-standard/deprecated; the File API property is .size
    // (the size check earlier in this file already uses .size).
    xhr.setRequestHeader("X-File-Size", file.size);
    xhr.setRequestHeader("X-File-Type", file.type);
    // Send the raw file body (doh)
    xhr.send(file);
};
This works fine in Chrome and Firefox, but misbehaves in Safari.
Any help would be appreciated!
Best,
tribe84
I can't replicate your error on Safari for OS X.
Perhaps the problem is related to Safari on Windows. It is probably not the best idea to test with Safari on Windows anyway.
Here's a working JSFiddle: http://jsfiddle.net/wh3E5/
Related
I am a bit ashamed of the problem I am currently encountering, but I don't understand how to load OpenData from this portal (Toulouse Open Data), which returns JSON with a rather unusual structure...
My code which send the request is like that :
// Fetch the Toulouse open-data records and log the parsed response.
// Listeners are attached BEFORE send(): registering them after send() (as
// the original did) is a race — early progress/readystatechange events can
// fire, and be lost, before addEventListener runs.
var myXHR = new XMLHttpRequest();
myXHR.open('GET', 'https://data.toulouse-metropole.fr/api/v2/catalog/datasets/recensement-population-2012-grands-quartiers-logement/records?rows=100&pretty=false&timezone=UTC');
myXHR.addEventListener('progress', function (e) {
    console.log(e.loaded + ' / ' + e.total);
});
myXHR.addEventListener('readystatechange', function () {
    if (myXHR.readyState === XMLHttpRequest.DONE) {
        if (myXHR.status === 200) {
            var myResponse = JSON.parse(myXHR.responseText);
            console.log("myXHR", myResponse);
        } else {
            console.log("myXHR", myXHR.statusText);
        }
    }
});
myXHR.send(null);
Thank you for your help ! =)
My problem is that I need to upload big video files — not that big, around 100–500 MB. Videos of 70 MB upload fine, but when I try to upload a larger file, Firefox crashes and closes. The same happens with Chrome. I'm using html5uploader.js.
/*
* Upload files to the server using HTML 5 Drag and drop the folders on your local computer
* Tested on: Mozilla Firefox 3.6.12, Google Chrome 7.0.517.41, Safari 5.0.2, WebKit r70732
* The current version does not work on: Opera 10.63, Opera 11 alpha, IE 6+.
*/
// uploader(place, status, targetPHP, show) — HTML5 drag-and-drop uploader.
//   place:     id of the DOM element that accepts dropped files
//   status:    id of the element that receives status/progress HTML (may be null)
//   targetPHP: URL of the server-side upload handler
//   show:      id of the element where previews / file info are appended (falsy to skip)
// NOTE(review): `upload`, `bin`, `xhr` and `reader` below are assigned
// without var and therefore leak into the global scope.
function uploader(place, status, targetPHP, show) {
// Type filter. NOTE(review): this REJECTS the listed image/audio types
// ("Arquivo não permitido" = "file not allowed") and lets everything else
// (e.g. video) through — confirm this inversion of the library's original
// "images only" check is intended.
upload = function(file) {
if((file.type == 'image/jpeg' || file.type == 'audio/mpeg' || file.type =='image/png' || file.type == 'image/bmp' || file.type == 'image/gif')){
alert('Arquivo não permitido.');
return false;
}
// Firefox 3.6, Chrome 6, WebKit: read the file client-side with FileReader.
if(window.FileReader) {
// Fires once FileReader has finished reading the whole file.
// NOTE(review): the entire file is held in memory as a binary string here
// (and roughly doubled again by btoa() in the fallback branch) — this is
// the likely cause of browser crashes on multi-hundred-MB uploads.
this.loadEnd = function() {
bin = reader.result;
xhr = new XMLHttpRequest();
xhr.open('POST', targetPHP+'?up=true', true);
// Hand-built multipart/form-data body with a fixed boundary.
var boundary = 'xxxxxxxxx';
var body = '--' + boundary + "\r\n";
// body += "Content-Disposition: form-data; name='upload'; filename='" + file.name + "'\r\n";
body += "Content-Disposition: form-data; name='upload'; filename='" + encodeURI(file.name) + "'\r\n";
body += "Content-Type: " + file.type + "\r\n\r\n";
body += bin + "\r\n";
body += '--' + boundary + '--';
xhr.setRequestHeader('content-type', 'multipart/form-data; boundary=' + boundary);
// Firefox 3.6 provides the non-standard sendAsBinary().
if(xhr.sendAsBinary != null) {
xhr.sendAsBinary(body);
// Chrome 7 sends data but you must use base64_decode on the PHP side.
} else {
// Re-open the request (discarding the multipart header set above) and
// send the raw content base64-encoded, with metadata in custom headers.
xhr.open('POST', targetPHP+'?up=true&base64=true', true);
xhr.setRequestHeader('UP-FILENAME', file.name);
xhr.setRequestHeader('UP-SIZE', file.size);
xhr.setRequestHeader('UP-TYPE', file.type);
xhr.send(window.btoa(bin));
}
// Append a line showing the file's name and size in MB.
if (show) {
var newFile = document.createElement('div');
tamanho = (((file.size)/1024)/1024);
tamanhoMB = tamanho.toFixed(2);
newFile.innerHTML = 'Nome do arquivo: ' + file.name+ ' | Tamanho: '+tamanhoMB+' MB';
document.getElementById(show).appendChild(newFile);
}
// NOTE(review): "upload complete" is reported when the local READ
// completes, not when the XHR upload has actually finished.
if (status) {
document.getElementById(status).innerHTML = 'Upload concluído! Arraste o próximo arquivo.';
}
};
// Map FileReader error codes to user-facing messages (Portuguese strings).
this.loadError = function(event) {
switch(event.target.error.code) {
case event.target.error.NOT_FOUND_ERR:
document.getElementById(status).innerHTML = '<h4>Arquivo não encontrado!</h4>';
break;
case event.target.error.NOT_READABLE_ERR:
document.getElementById(status).innerHTML = '<h4>Arquivo não legível!</h4>';
break;
case event.target.error.ABORT_ERR:
break;
default:
document.getElementById(status).innerHTML = '<h4>Erro de leitura.</h4>';
}
};
// Progress of the local READ (not of the network upload).
this.loadProgress = function(event) {
if (event.lengthComputable) {
var percentage = Math.round((event.loaded * 100) / event.total);
document.getElementById(status).innerHTML = '<h4>Carregando... '+percentage+'%</h4>';
}
};
// Insert an <img> preview once the data-URL read below finishes.
this.previewNow = function(event) {
bin = preview.result;
var img = document.createElement("img");
img.className = 'addedIMG';
img.file = file;
img.src = bin;
document.getElementById(show).appendChild(img);
};
reader = new FileReader();
// Firefox 3.6, WebKit
if(reader.addEventListener) {
reader.addEventListener('loadend', this.loadEnd, false);
if (status != null)
{
reader.addEventListener('error', this.loadError, false);
reader.addEventListener('progress', this.loadProgress, false);
}
// Chrome 7
} else {
reader.onloadend = this.loadEnd;
if (status != null)
{
reader.onerror = this.loadError;
reader.onprogress = this.loadProgress;
}
}
var preview = new FileReader();
// Firefox 3.6, WebKit
if(preview.addEventListener) {
preview.addEventListener('loadend', this.previewNow, false);
// Chrome 7
} else {
preview.onloadend = this.previewNow;
}
// The function that starts reading the file as a binary string
reader.readAsBinaryString(file);
// Preview uploaded files
if (show) {
preview.readAsDataURL(file);
}
// Safari 5 does not support FileReader: POST the File object directly,
// with metadata in custom headers.
} else {
xhr = new XMLHttpRequest();
xhr.open('POST', targetPHP+'?up=true', true);
xhr.setRequestHeader('UP-FILENAME', file.name);
xhr.setRequestHeader('UP-SIZE', file.size);
xhr.setRequestHeader('UP-TYPE', file.type);
xhr.send(file);
if (status) {
document.getElementById(status).innerHTML = '100% carregado';
}
if (show) {
var newFile = document.createElement('div');
newFile.innerHTML = file.name+' (Tamanho: '+file.size+' Bytes)';
document.getElementById(show).appendChild(newFile);
}
}
};
// Drop handler: accept at most two files per drop and upload each in turn.
this.drop = function(event) {
event.preventDefault();
var dt = event.dataTransfer;
var files = dt.files;
if (files.length<=2){
for (var i = 0; i<files.length; i++) {
var file = files[i];
upload(file);
}
}else{
alert('Limite de upload excedido! Máximo permitido: dois arquivos.');
}
};
// Wire up the drop zone; dragover must be cancelled for drop to fire.
this.uploadPlace = document.getElementById(place);
this.uploadPlace.addEventListener("dragover", function(event) {
event.stopPropagation();
event.preventDefault();
}, true);
this.uploadPlace.addEventListener("drop", this.drop, false);
}
I read a lot of questions and posts with a similar problem, but nothing seems to work here. I don't know what else I can do. I changed the variables to these values in wamp\bin\php\php5.3.5\php.ini, wamp\bin\apache\Apache2.2.17\bin\php.ini and wamp\bin\php\php5.3.5\phpForApache.ini:
upload_max_filesize -> 1024mb,
post_max_size -> 1024mb,
max_execution_time -> 600,
max_input_time -> 600,
memory_limit -> 2048mb.
Restart wamp, didnt work. On wamp\bin\apache\Apache2.2.17\conf\extra\httpd-vhosts.conf i put the following:
<Directory "D:\wamp\www\projec">
LimitRequestBody 102400
Options Indexes FollowSymLinks Includes ExecCGI
AllowOverride All
Order allow,deny
Allow from all
</Directory>
That also didn't work. Does anyone have a solution or an idea to save me? I'd really appreciate it. Thanks.
I'm trying to build a JSON out of multiple requests on my mongodb.
since I'm not using DBRef, I have to build the "table joints" by myself, and that's how I ended up in this mess.
This is the code that is giving me the headaches from a couple of days now.
(the mongo part is done with mongoskin)
// Builds the user-feed JSON by manually "joining" the userfeed, apparel and
// stores collections, then sends the result via out.send().
// NOTE(review): this is the BROKEN version discussed in the question — the
// completion check near the bottom can be satisfied by more than one nested
// callback, so out.send() fires multiple times ("headers already sent").
// The async.js rewrite further down is the fix.
var getUserFeed = function(thelimit, out) {
// NOTE(review): no var — these become implicit globals.
userfeed = db.collection("userfeed");
apparel = db.collection("apparel");
store = db.collection("stores");
// NOTE(review): `args` is also an implicit global; when thelimit is falsy
// it silently reuses whatever a previous call left in it.
if(thelimit)
args = {limit:thelimit, sort: [['date',-1]]};
userfeed.find({},args).toArray(function(e, feed) {
if (e) console.log("error: ", e);
// gather aparel infos
var i=0;
var ret_feeds = [];
feed.forEach(function(cur_feed) {
var outfits=[];
console.log("beginning with: " + cur_feed.url);
var resfeed = "";
resfeed = cur_feed;
resfeed.url = baseurl + snapurl + resfeed.url + "_small.jpg";
// NOTE(review): `i` is shared across ALL iterations of the outer
// forEach, so the per-feed completion count below is unreliable.
i=0;
cur_feed.apparel_ids.forEach(function(item) {
/*>>*/ apparel.find({"_id": item},{limit:1}).toArray(function(e, results) {
console.log(">>>>>>>>>>> APPAREL_FIND { i:" + i + "}");
if (e) console.log("error: ", e);
results = results[0];
if(results.apparel_cat == 1)
url_subcat = "pants/";
else if(results.apparel_cat == 2)
url_subcat = "shirts/";
// NOTE(review): duplicated `== 2` test — this branch is unreachable,
// so "tshirts/" is never chosen (probably meant category 3).
else if(results.apparel_cat == 2)
url_subcat = "tshirts/";
results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
results.size = "M"; ///// TOBE REAL VERY SOON
results.gallery = [
baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
];
outfits.push(results); // quick and dirty, 2 b refined..
i++;
if(i>=cur_feed.apparel_ids.length)
{
// pack it up
// resfeed.url = resfeed.url;
resfeed.outfits = outfits;
resfeed.fav = false;
resfeed.bough = false;
// retrieve store infos
/*>>>*/ store.find({"_id":resfeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
console.log("\t############# STORE_FIND { i:" + i + "}");
if (e) console.log("error: ", e);
resfeed.store = resstore[0];
resfeed.store.class = "hem";
ret_feeds.push(resfeed);
// NOTE(review): with interleaved callbacks this condition can hold
// on more than one pass — the cause of the multiple-send crash.
if(ret_feeds.length >= feed.length)
{
console.log("\t\t######################calling return [ ret_feeds.length = " + ret_feeds.length + " feed.length = " + feed.length);
out.send(ret_feeds);
}
});
}
});
});
});
});
}
This code fails because it returns the JSON before finishing its task, so the next time it tries to return another JSON it crashes miserably due to the fact that the headers have already been sent.
Now as you can see, I have 3 collections: userfeed, apparel and stores.
the goal of this function is to retrieve all the items in the userfeed collection, extract the outfits (based on the outfit_id array that is part of the userfeed collection), and also extract the store infos related in the same way to each userfeed entry, like so:
I know that async.js or equivalent is the way to go: I've read like a gazillion of other posts here on SO, but I still can't get my head around it, probably because the whole mechanism behind async.js, or flow control in general, is still out of focus in my mind.
I'm still a noob at node :)
UPDATE
I think I found the right path for understanding here: http://www.sebastianseilund.com/nodejs-async-in-practice
this guy made a terrific job in describing use-case by use-case all the ways to apply async.js to your code.
I'll post the solution as soon as I get around it.
UPDATE 2
Thanks to the above dude I could work out a working solution, below is the answer.
After so much struggling I have finally managed to get a solution.
async.js was the answer as I was (obviously) suspecting.
FYI here's the working code.
If you like to point out improvements or anything else, you are more than welcome
// Build the user-feed JSON with async.js flow control: first load the feed
// entries, then for each entry load its apparel items and store info in
// parallel, and send the whole result exactly once when everything is done.
var getUserFeed = function(thelimit, out) {
    var userfeed = db.collection("userfeed");
    var apparel = db.collection("apparel");
    var store = db.collection("stores");
    var args;
    if (thelimit)
        args = {limit: thelimit, sort: [['date', -1]]};
    // Fixed: the original used `array()` (a PHP-ism) which is a
    // ReferenceError in JavaScript.
    var feeds = [];
    async.series([
        // 1) fetch the raw feed entries
        function(callback) {
            userfeed.find({}, args).toArray(function(e, feed) {
                // return so we don't fall through and call callback twice
                if (e) return callback(e);
                // toArray() already yields an array — don't re-wrap it
                // (the original `array(feed)` would have nested it).
                feeds = feed;
                console.log(feeds.length + " retrieved. stepping in");
                callback(null, null);
            });
        },
        // 2) join apparel + store data onto every entry
        function(callback) {
            async.forEach(feeds, function(thefeed, callback) {
                var i = feeds.indexOf(thefeed);
                async.parallel([
                    // load apparel infos
                    function(callback) {
                        console.log("\t >>> analyzing thefeed id " + thefeed._id);
                        async.forEach(thefeed.apparel_ids, function(apparel_id, callback) {
                            apparel.find({"_id": apparel_id}, {limit: 1}).toArray(function(e, results) {
                                if (e) console.log("error: ", e);
                                results = results[0];
                                var url_subcat;
                                if (results.apparel_cat == 1)
                                    url_subcat = "pants/";
                                else if (results.apparel_cat == 2)
                                    url_subcat = "shirts/";
                                // FIXME(review): duplicated `== 2` test makes this
                                // branch dead — "tshirts/" is never chosen
                                // (category 3 was probably intended); kept as-is
                                // pending confirmation of the category codes.
                                else if (results.apparel_cat == 2)
                                    url_subcat = "tshirts/";
                                results.thumb = baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg";
                                results.size = "M"; ///// TOBE REAL VERY SOON
                                results.gallery = [
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/model.jpg",
                                    baseurl + outfiturl + url_subcat + results.apparel_id + "/front.jpg"
                                ];
                                console.log("\t\t### pushing data into thefeed_index: " + i);
                                // Fixed: the original tested feeds[i].oufits (typo),
                                // which is always undefined, so outfits was reset on
                                // EVERY iteration and only the last item survived.
                                if (!Array.isArray(feeds[i].outfits)) feeds[i].outfits = [];
                                feeds[i].outfits.push(results);
                                callback(null, null);
                            });
                        }, callback);
                    },
                    // load store infos
                    function(callback) {
                        store.find({"_id": thefeed.store_id}, {limit: 1}).toArray(function(e, resstore) {
                            console.log("\t### STORE_FIND");
                            if (e) console.log("error: ", e);
                            feeds[i].store = resstore[0];
                            feeds[i].store.class = "hem";
                            callback(null, null);
                        });
                    }
                ], callback);
            }, callback);
        }
    // MAIN: runs once, after both series steps complete (or on first error)
    ], function(err, result) {
        console.log("feed retrieval completed. stepping out");
        if (err) return next(err);
        out.send(feeds);
    });
};
I have a tight loop that fetches PNG files via xhr2. It works fine in FF and IE10. In Chrome, when my list of files hits about 5,500, I start getting XHR errors. I like the async interface since I am interleaving these requests with local IndexedDB store requests.
Code below (I am using xhr2lib for fetches and PouchDB for the IndexedDB API).
I know that it is the XHR2 that is failing, since when this works, in Chrome, all the XHR2 calls are processed before the SaveDB() calls. When It fails, I never get the save calls.
// Kick off an async XHR2 fetch for every file in the manifest list; each
// successfully downloaded blob is handed straight to saveBlob().
function getBlobs(fileList) {
    console.log("starting to fetch blobs");
    var fetchOne = function (index, entry) {
        var url = baseURL + entry.id + "." + imageType;
        $xhr.ajax({
            url: url,
            dataType: "blob",
            success: function (blob) {
                saveBlob(blob, entry.size, entry.id);
            }
        });
    };
    $.each(fileList, fetchOne);
}
// Sanity-check a downloaded blob's size, persist it via putBlob(), and
// schedule fetchComplete() once every file in the manifest has been fetched.
function saveBlob(blob, length, id) {
    var sizeMatches = blob.size == length;
    if (!sizeMatches) {
        console.error("Blob Length found: " + blob.size + " expected: " + length);
    }
    putBlob(blob, id);
    fetchCnt += 1;
    var allFetched = fetchCnt == manifest.files.length;
    if (allFetched) {
        // Defer so the completion handler runs outside the XHR callback.
        setTimeout(fetchComplete, 0);
    }
}
// Record how long the whole fetch phase took, formatted onto the view-model.
function fetchComplete() {
    var elapsedMs = new Date() - vm.get("startTime");
    vm.set("fetchTime", ms2Time(elapsedMs));
}
// Persist one downloaded blob as an attachment in the local PouchDB store
// and bump the view-model's blobCount; once every manifest file has been
// stored, schedule storeComplete().
function putBlob(blob, id) {
    var cnt;
    var type = blob.type;
    DB.putAttachment(id + "/pic", blob, type, function (err, response) {
        if (err) {
            // Fixed message: previously read "Could store blob".
            console.error("Could not store blob: error: " + err.error + " reason: " + err.reason + " status: " + err.status);
        } else {
            console.log("saved: ", response.id + " rev: " + response.rev);
            cnt = vm.get("blobCount");
            vm.set("blobCount", ++cnt);
            if (cnt == manifest.files.length) {
                // All blobs stored — defer the completion handler out of
                // the storage callback.
                setTimeout(storeComplete, 0);
            }
        }
    });
}
The chromium folks acknowledge this is something that they should fix: https://code.google.com/p/chromium/issues/detail?id=244910, but in the meantime I have implemented throttling using jquery defer/resolve to keep the number of threads low.
I want it to show the div relating to the tab if it = 1, or hide it if it = 0.
Script:
// Load the content of each of the three tabs in order.
function blah(){
    var tabIds = ['a', 'b', 'c'];
    for (var i = 0; i < tabIds.length; i++) {
        loadtab(tabIds[i]);
    }
}
var page = cheese // page name used to build the "devices/<page>.html" URL; per the author, `cheese` is defined in earlier code
// Loads the tab's remote fragment and shows/hides the tab's div based on
// whether its text is "1".
// BUG (the subject of this question): .load() is asynchronous, so the
// .text() read below runs BEFORE the content has arrived — tabcontent is
// always the (empty) pre-load text. The fix is to do the read inside
// .load()'s completion callback, as shown in the answers.
function loadtab(tab){
$('#'+tab).hide();
$('#'+tab).load("devices/" + page + ".html " + "#" + tab);
var tabcontent = $("#"+tab).text();
alert(tab); //works
alert(tabcontent); //doesn't — this read races the asynchronous load above
if (tabcontent == "1"){
$('#'+tab).show();
}
else{
$('#'+tab).hide();
}
}
*variable defined in previous code
HTML on cheese.html:
<div id="a">0</div>
<div id="b">0</div>
<div id="c">1</div>
Alerting tab gives a, b and c in separate alerts. Alerting tabcontent gives a blank alert. Why is this?
Seems like you're getting the text before the load method finishes populating the tab. Try executing your code in the callback of .load.
// Everything that depends on the loaded content now runs inside .load()'s
// completion callback, i.e. only after the fragment has actually arrived.
$('#'+tab).load("devices/" + page + ".html " + "#" + tab, function(){
var tabcontent = $("#"+tab).text();
alert(tab); //works
alert(tabcontent); //now populated — it runs after the load completes
if (tabcontent == "1"){
$('#'+tab).show();
}
else{
$('#'+tab).hide();
}
});
On a side note, you should probably cache $('#'+tab) as you use it pretty often in that function.
To cache your selector, simply do this.
var $tab = $('#'+tab); // store your jQuery object into a variable
$tab.hide();
// NOTE(review): this caching variant still reads the text immediately after
// kicking off the asynchronous .load(), so it has the same race as the
// question's code — combine the caching with the completion callback above.
$tab.load("devices/" + page + ".html " + "#" + tab);
var tabcontent = $tab.text();
alert(tab); //works
alert(tabcontent); //doesn't — the load hasn't finished yet
if (tabcontent == "1"){
$tab.show();
}
else{
$tab.hide();
}
It just improves performance as jQuery doesn't have to keep searching the DOM for the tab, you simply keep reference to it in another variable at the start of your function.
try to do this:
// Hide the tab, pull its fragment from devices/<page>.html, and decide its
// visibility from the freshly loaded text — inside .load()'s completion
// callback, so the content is guaranteed to have arrived.
function loadtab(tab){
    var selector = '#' + tab;
    $(selector).hide();
    $(selector).load("devices/" + page + ".html " + "#" + tab, function() {
        var tabcontent = $(selector).text();
        alert(tab); //works
        alert(tabcontent); //doesn't
        if (tabcontent == "1") {
            $(selector).show();
        } else {
            $(selector).hide();
        }
    });
}