IndexedDB transaction always throwing onabort from add() method in Chrome

I just started experimenting with IndexedDB. I copied an example and pared it down to a small HTML page: Push a button; add a record; dump all the records to the console after the transaction completes.
It runs fine in IE11, but not in Chrome.
The request = transaction.objectStore("store").add({k: v}) call always fires request.onsuccess(), but Chrome always resolves the transaction with transaction.onabort(). The same happens with .put().
This is the code:
<!DOCTYPE html>
<html>
<head>
<script>
    //--- globals
    var db;

    // The initialization of our stuff in body.onload()
    function init() {
        var dbVersion = 1;

        //--- Try to delete any existing database
        var delRequest = indexedDB.deleteDatabase("notesDB");
        delRequest.onsuccess = function (de) {
            dbOpen(); // ... then open a new one
        };
        delRequest.onerror = function (de) {
            dbOpen(); // ... or open a new one if one doesn't exist to delete
        };

        function dbOpen() {
            var openRequest = indexedDB.open("notesDB", dbVersion);
            openRequest.onupgradeneeded = function (e) {
                var ldb = e.target.result;
                console.log("running onupgradeneeded; always start with a fresh object store");
                if (ldb.objectStoreNames.contains("note")) {
                    ldb.deleteObjectStore("note");
                }
                if (!ldb.objectStoreNames.contains("note")) {
                    console.log("creating new note data store");
                    var objectStore = ldb.createObjectStore("note", { autoIncrement: true });
                    objectStore.createIndex("title", "title", { unique: false });
                }
            };
            openRequest.onsuccess = function (e) {
                db = e.target.result;
                db.onerror = function (event) {
                    // Generic error handler for all errors targeted at this database
                    alert("Database error: " + event.target.errorCode);
                    console.dir(event.target);
                };
                console.log("Database opened; dump existing rows (shouldn't be any)");
                displayNotes();
            };
            openRequest.onerror = function (e) {
                console.log("Open error");
                console.log(e);
                console.dir(e);
            };
        }

        function displayNotes() {
            console.log("TODO - print something nice on the page");
            var tx = db.transaction("note", "readonly");
            tx.oncomplete = function (event) { console.log("read only cursor transaction complete"); };
            tx.onerror = function (event) { console.log("readonly transaction onerror"); };
            tx.onabort = function (event) { console.log("readonly transaction onabort"); };

            // --- iterate cursor
            console.log("---Start cursor dump---");
            var ds = tx.objectStore("note");
            ds.openCursor().onsuccess = function (event) {
                var cursor = event.target.result;
                if (cursor) {
                    console.log(cursor.key);
                    console.dir(cursor.value);
                    cursor.continue();
                }
                else {
                    console.log("---End cursor dump---");
                }
            };
        }

        document.querySelector("#test").addEventListener("click", function (clickevent) {
            try {
                var transaction = db.transaction("note", "readwrite");
                transaction.oncomplete = function (event) {
                    console.log("Cursor dump in 'add' read/write transaction oncomplete");
                    displayNotes();
                    console.log("add transaction oncomplete done!");
                };
                transaction.onerror = function (event) {
                    console.log("add transaction onerror");
                };
                transaction.onabort = function (event) {
                    console.log("add transaction onabort");
                };
                var objectStore = transaction.objectStore("note");
                var request = objectStore.add({
                    title: "note header",
                    body: "this is random note body content " + Math.floor(Math.random() * 1000)
                });
                request.onsuccess = function (event) {
                    console.log("add request onsuccess");
                };
                request.onerror = function (event) {
                    console.log("add request onerror");
                    console.dir(event);
                };
            }
            catch (e) {
                console.log('catchall exception');
                console.log(e);
                alert("bad things done");
            }
        });
    }
</script>
</head>
<body onload="init()">
    <h1>IndexedDB simplest example</h1>
    <p>
        <button id="test">Push To Add Row To IndexedDB</button>
    </p>
</body>
</html>

I clicked the button a bunch of times and it worked every time.
What error are you getting when it aborts? Look at event.target.error in the onabort handler to find out. It could be a QuotaExceededError, which would mean that either you have very little hard drive space free or you have a lot of data stored in Chrome for your domain. If that's the case, it's good you're running into it now, because you need to handle this case gracefully; otherwise users will hit it and be confused.
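For example, a minimal sketch of surfacing the abort reason in the question's existing handler:
transaction.onabort = function (event) {
    // For an aborted transaction, event.target is the transaction itself,
    // and its .error property is a DOMException (e.g. name === "QuotaExceededError")
    var err = event.target.error;
    console.log("add transaction onabort:", err && err.name, err && err.message);
};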

Related

FileReader sometimes works and sometimes not with cordova

The FileReader.readAsArrayBuffer method does not execute the handlers and gives no error.
The problem occurs only sometimes; not at runtime, but when I run the r.js process (with grunt grunt-contrib-requirejs).
It's very strange.
The code is:
CoFS.prototype.readFromFileObject = function (file, callback) {
    var self = this;
    this._log("Creating filereader object", file);
    var reader = new FileReader();
    reader.onloadend = function (evt) {
        self._log("load end call! with length (relative): ", this.result.length);
        if (!this.result) return null;
        var Buf = arrayBufferToBuffer(this.result);
        callback(null, Buf);
    };
    reader.onerror = function (ev) {
        self._log("Error Reading file", ev);
        callback(new Error('Reading file'));
    };
    reader.onabort = function (ev) {
        self._log("Reading file Abort!!");
        callback(new Error('Reading abort'));
    };
    reader.onload = function () {
        self._log("load");
    };
    reader.onloadstart = function () {
        self._log("start");
    };
    reader.onprogress = function () {
        self._log("progress");
    };
    try {
        reader.readAsArrayBuffer(file);
    } catch (e) {
        self._log(e);
    }
};
At first I thought this was a failure to define some objects:
if (!FileReader) FileReader = window.FileReader || null;
if (!requestFileSystem) requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem || null;
if (!File) File = window.File || null;
if (!FileReader || !File)
    throw new Error("Objects of file API does not exists!");
But while debugging, I realized that all the variables are what they should be:
Debug screenshot: http://esfriki.com/f/wtf.min.png
Update: In the end this is not caused by the minification; it is an error that occurs randomly when I build.

In chrome App, video file is not saved with its original data/size

In appCtrl.js, the code for saving the video file is:
$('#save_file').click(function(e) {
    var config = {type: 'saveFile', suggestedName: chosenEntry.name};
    chrome.fileSystem.chooseEntry(config, function(writableEntry) {
        // blob content is the DataUrl
        var blob = new Blob([$scope.blobContent], {type: 'video/mp4'});
        $scope.writeFileEntry(writableEntry, blob, function(e) {
            console.log('Write complete :)');
        });
    });
});

$scope.writeFileEntry = function(writableEntry, opt_blob, callback) {
    if (!writableEntry) {
        console.log('Nothing selected.');
        return;
    }
    writableEntry.createWriter(function(writer) {
        writer.onerror = $scope.errorHandler;
        writer.onwriteend = callback;
        // If we have data, write it to the file. Otherwise, just use the file we loaded.
        if (opt_blob) {
            writer.truncate(opt_blob.size);
            $scope.waitForIO(writer, function() {
                writer.seek(0);
                writer.write(opt_blob);
            });
        }
        else {
            chosenEntry.file(function(file) {
                writer.truncate(file.fileSize);
                $scope.waitForIO(writer, function() {
                    writer.seek(0);
                    writer.write(file);
                });
            });
        }
    }, $scope.errorHandler);
};

$scope.waitForIO = function(writer, callback) {
    // set a watchdog to avoid eventual locking:
    var start = Date.now();
    // wait for a few seconds
    var reentrant = function() {
        if (writer.readyState === writer.WRITING && Date.now() - start < 4000) {
            setTimeout(reentrant, 100);
            return;
        }
        if (writer.readyState === writer.WRITING) {
            console.error("Write operation taking too long, aborting!" +
                " (current writer readyState is " + writer.readyState + ")");
            writer.abort();
        }
        else {
            callback();
        }
    };
    setTimeout(reentrant, 100);
};
In the above code the video file is saved, but when I try to play the saved file in Windows Media Player or VLC, it tells me "Windows Media Player cannot play the file. The player might not support the file type or might not support the codec that was used to compress the file."
Can you please guide me on where I'm going wrong? This is my first Chrome app.
Thanks in advance.
The problem is that your code writes the data-URL string itself into the Blob, so the saved file contains base64 text rather than the original binary video data. Convert the data URL back to raw bytes before storing it, like this:
function dataURItoBlob(dataURI) {
    // convert base64 to raw binary data held in a string
    // doesn't handle URLEncoded DataURIs
    var byteString = atob(dataURI.split(',')[1]);
    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to an ArrayBuffer
    var ab = new ArrayBuffer(byteString.length);
    var ia = new Uint8Array(ab);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }
    return new Blob([ab], {type: 'video/mp4'});
}
And handle the click like this:
$('#save_file').click(function(e) {
    var config = {type: 'saveFile', suggestedName: chosenEntry.name};
    chrome.fileSystem.chooseEntry(config, function(writableEntry) {
        var blob = dataURItoBlob($scope.blobContent);
        $scope.writeFileEntry(writableEntry, blob, function(e) {
            console.log('Write complete :)');
        });
    });
});

How to free up memory when saving images inside IndexedDB

I have a number of images on the page and am trying to save each one inside IndexedDB if it does not already exist.
Everything seems to work, and images load instantly when they already exist, but the browser's memory appears to be leaking: the page jerks and sometimes hangs. I'm not sure how to handle this. I have written a directive that looks like this:
(function () {
    'use strict';

    // TODO: replace app with your module name
    angular.module('app').directive('imageLocal', imageLocal);

    imageLocal.$inject = ['$timeout', '$window', 'config', 'indexDb'];

    function imageLocal($timeout, $window, config, indexDb) {
        var directive = {
            link: link,
            restrict: 'A'
        };
        return directive;

        function link(scope, element, attrs) {
            var imageId = attrs.imageLocal;

            // Open a transaction to the database
            var transaction;
            $timeout(function () {
                transaction = indexDb.db.transaction(["mystore"], "readwrite");
                getImage();
            }, 500);

            function getImage() {
                transaction.objectStore('mystore').get(imageId)
                    .onsuccess = function (event) {
                        var imgFile = event.target.result;
                        if (imgFile == undefined) {
                            saveToDb();
                            return false;
                        }
                        showImage(imgFile);
                    };
            }

            function showImage(imgFile) {
                console.log('getting');
                // Get window.URL object
                var url = $window.URL || $window.webkitURL;
                // Create an ObjectURL for the blob (it is never revoked)
                var imageUrl = url.createObjectURL(imgFile);
                element.css({
                    'background-image': 'url("' + imageUrl + '")',
                    'background-size': 'cover'
                });
            }

            function saveToDb() {
                // Create XHR
                var xhr = new XMLHttpRequest(),
                    blob;
                xhr.open("GET", config.remoteServiceName + '/image/' + imageId, true);
                // Set the responseType to blob
                xhr.responseType = "blob";
                xhr.addEventListener("load", function () {
                    if (xhr.status === 200) {
                        console.log("Image retrieved");
                        // Blob as response
                        blob = xhr.response;
                        console.log("Blob:" + blob);
                        // Put the received blob into IndexedDB
                        putInDb(blob);
                    }
                }, false);
                // Send XHR
                xhr.send();

                function putInDb(blob) {
                    // Open a transaction to the database
                    transaction = indexDb.db.transaction(["mystore"], "readwrite");
                    // Put the blob into the database
                    var request = transaction.objectStore("mystore").add(blob, imageId);
                    getImage();
                    request.onsuccess = function (event) {
                        console.log('saved');
                    };
                }
            }
        }
    }
})();

Parallel form submit and ajax call

I have a web page that invokes a long-running request on the server. The request generates an Excel file and streams it back to the client when it is ready.
The request is invoked by creating a form element with jQuery and calling its submit method.
While the request is being processed, I would like to show the user the progress of the task.
I thought to do this with a jQuery AJAX call to a service on the server that returns status messages.
My problem is that when I call this service (using $.ajax), the callback is invoked only after the request initiated by the form submit has ended.
Any suggestions?
The code:
<script>
    function dummyFunction() {
        var notificationContextId = "someid";
        var url = $fdbUI.config.baseUrl() + "/Promis/GenerateExcel.aspx";
        var $form = $('<form action="' + url + '" method="POST" target="_blank"></form>');
        var $hidden = $("<input type='hidden' name='viewModel'/>");
        $hidden.val(self.toJSON());
        $hidden.appendTo($form);
        var $contextId = $("<input type='hidden' name='notifyContextId'/>").val(notificationContextId);
        $contextId.appendTo($form);
        $('body').append($form);
        self.progressMessages([]);
        $fdbUI.notificationHelper.getNotifications(notificationContextId, function (message) {
            var messageText = '';
            if (message.IsEnded) {
                messageText = "Excel is ready to download";
            } else if (message.IsError) {
                messageText = "An error occurred while preparing the excel file. Please try again...";
            } else {
                messageText = message.NotifyData;
            }
            self.progressMessages.push(messageText);
        });
        $form.submit();
    }
</script>
The code uses a utility library that invokes $.ajax. Its code is:
(function () {
    if (!window.flowdbUI) {
        throw ("missing reference to flowdb.ui.core.");
    }

    function NotificationHelper() {
        var self = this;
        this.intervalId = null;

        this.getNotifications = function (contextId, fnCallback) {
            if ($.isFunction(fnCallback) == false)
                return;
            self.intervalId = setInterval(function () {
                self._startNotificationPolling(contextId, fnCallback);
            }, 500);
        };

        this._startNotificationPolling = function (contextId, fnCallback) {
            if (self._processing)
                return;
            self._processing = true;
            self._notificationPolling(contextId, function (result) {
                if (result.success) {
                    var message = result.retVal;
                    if (message == null) {
                        // clear the busy flag before bailing out, or polling stops for good
                        self._processing = false;
                        return;
                    }
                    if (message.IsEnded || message.IsError) {
                        clearInterval(self.intervalId);
                    }
                    fnCallback(message);
                } else {
                    clearInterval(self.intervalId);
                    fnCallback({ NotifyData: null, IsEnded: false, IsError: true });
                }
                self._processing = false;
            });
        };

        this._notificationPolling = function (contextId, fnCallback) {
            $fdbUI.core.executeAjax("NotificationProvider", { id: contextId }, function (result) {
                fnCallback(result);
            });
        };
        return this;
    }

    window.flowdbUI.notificationHelper = new NotificationHelper();
})();
By default, ASP.NET allows only a single concurrent request per session, to avoid race conditions. So the server does not respond to your status requests until the long-running request started by the form submit is complete.
One possible approach would be to make your form post return immediately, and when the status request shows completion, start up a new request to get the data that it knows is waiting for it on the server.
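As a sketch of the client side of that first approach, reusing the question's polling helper (DownloadExcel.aspx is a hypothetical endpoint that would stream the already-generated file for a context id):
$fdbUI.notificationHelper.getNotifications(notificationContextId, function (message) {
    if (message.IsEnded) {
        // Hypothetical endpoint: the server has finished generating the file
        // and is holding it, so a plain navigation can download it.
        window.location.href = $fdbUI.config.baseUrl() +
            "/Promis/DownloadExcel.aspx?notifyContextId=" + notificationContextId;
    }
});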
Or you could try changing the EnableSessionState settings to allow multiple concurrent requests, as described here.
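A sketch of that second option for Web Forms, assuming the status service is served by an .aspx page (called NotificationProvider.aspx here) that doesn't need to write to the session:
<%-- With session state disabled, this request never waits on the session lock
     held by GenerateExcel.aspx. EnableSessionState="ReadOnly" also avoids taking
     the exclusive lock, but it still waits for any writer that already holds it. --%>
<%@ Page Language="C#" EnableSessionState="false" %>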

HTML FileReader

function fileSelected() {
    // get selected file element
    var files = document.getElementById('files[]').files;
    for (var i = 0; i < files.length; i++) // for multiple files
    {
        (function (file) {
            var fileObj = {
                Size: bytesToSize(file.size),
                Type: file.type,
                Name: file.name,
                Data: null
            };
            var reader = new window.FileReader();
            reader.onload = function (e) {
                fileObj.Data = e.target.result;
            };
            // read selected file as DataURL
            reader.readAsDataURL(file);
            // Create Item
            CreateFileUploadItem(fileObj);
        })(files[i]);
    }
}

function CreateFileUploadItem(item) {
    console.log(item);
    $('<li>', {
        "class": item.Type,
        "data-file": item.Data,
        "html": item.Name + ' ' + item.Size
    }).appendTo($('#filesForUpload'));
}
So when console.log(item) runs in the CreateFileUploadItem function, it shows item.Data. Yet it won't add it to the data-file attribute of the <li>. Why is that?
The call to readAsDataURL is asynchronous, so fileSelected is likely returning before the onload handler is called. The value of fileObj.Data is therefore still null when you attempt to use it in CreateFileUploadItem.
To fix it, move the call to CreateFileUploadItem into your onload handler. As for the console logging the proper value: you can't rely on that being synchronous either. Setting a breakpoint on that line while debugging will likely show the true null value.
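A minimal sketch of that fix against the question's code, creating the list item only after the asynchronous read completes:
var reader = new window.FileReader();
reader.onload = function (e) {
    // The result exists only here, once the asynchronous read has finished
    fileObj.Data = e.target.result;
    CreateFileUploadItem(fileObj);
};
// read selected file as DataURL
reader.readAsDataURL(file);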