How to write and read a json file in ionic - html

I'm using Ionic to build an app, and I need to store some data and read it back, but I don't know how — can you give me some directions? To be more specific: the app has an option to view order history (it is a shopping app), so every time a user places an order I need to save it, and later the user can view all the orders they have made. To do this I need to load all the JSON from storage and display it. How should I save the data so that I can read it back dynamically afterwards?

You can use $cordovaFile services :
const fileName = "orders.json"
// Reads the saved orders file and resolves with the parsed array of orders.
// Resolves with [] when no file exists yet (first run / nothing saved).
var getUserOrders = function () {
    var d = $q.defer();
    $cordovaFile.checkFile(cordova.file.dataDirectory, fileName).then(
        function (success) {
            $cordovaFile.readAsText(cordova.file.dataDirectory, fileName).then(
                function (data) {
                    d.resolve(JSON.parse(data));
                },
                function (error) {
                    // FIX: propagate read failures instead of leaving the
                    // promise pending forever.
                    d.reject(error);
                });
        },
        function (error) {
            // No orders saved yet
            d.resolve([]);
        }
    );
    // FIX: the deferred's promise must be returned, otherwise callers
    // (e.g. saveAnOrder) call .then() on undefined.
    return d.promise;
};
// Appends `order` to the stored history and resolves with the full,
// updated list of orders.
var saveAnOrder = function (order) {
    var d = $q.defer();
    getUserOrders().then(
        function (data) {
            var userOrders = data;
            userOrders.push(order);
            // `true` asks writeFile to create the file when it is missing.
            $cordovaFile.writeFile(cordova.file.dataDirectory, fileName, JSON.stringify(userOrders), true).then(
                function (success) {
                    d.resolve(userOrders);
                },
                function (error) {
                    // FIX: reject instead of silently swallowing write failures.
                    d.reject(error);
                });
        },
        function (error) {
            d.reject(error);
        }
    );
    // FIX: return the promise so callers can observe completion/failure.
    return d.promise;
};

Related

Resize all existing images stored in firebase storage and update the newly resized image url to database via api call [duplicate]

This question already has an answer here:
How can I resize all existing images in firebase storage?
(1 answer)
Closed 9 months ago.
I have a requirement to resize new and existing images stored in Firebase Storage. For new images, I enabled Firebase's resize-image extension. For existing images, how can I resize each image and get the newly resized image URL to update back to the database via the API?
Here is my firebase function to get existing image urls from database. My question is how to resize the image and get the new image url?
const functions = require("firebase-functions");
const axios = require("axios");
// Fetches all albums (id + album_cover URL) from the GraphQL API.
// Returns an array of album objects; throws when the API reports errors.
async function getAlbums() {
const endpoint = "https://api.mydomain.com/graphql";
const headers = {
"content-type": "application/json",
};
const graphqlQuery = {
"query": `query Albums {
albums {
id
album_cover
}
}`
};
functions.logger.info("Call API");
const response = await axios({
url: endpoint,
method: 'post',
headers: headers,
data: graphqlQuery
});
// FIX: GraphQL errors arrive in the response body (response.data.errors),
// not on the axios response object; the original also fell through and
// returned undefined on error, which would crash the caller's .length.
if (response.data.errors) {
functions.logger.info("API ERROR : ", response.data.errors); // errors if any
throw new Error("GraphQL query failed");
}
return response.data.data.albums;
}
exports.manualGenerateResizedImage = functions.https.onRequest(async () => {
const albums = await getAlbums();
functions.logger.info("No. of Album : ", albums.length);
});
I think the below answer from Renaud Tarnec will definitely help you.
If you look at the code of the "Resize Images" extension, you will see that the Cloud Function that underlies the extension is triggered by a onFinalize event, which means:
When a new object (or a new generation of an existing object) is
successfully created in the bucket. This includes copying or rewriting
an existing object.
So, without rewriting/regenerating the existing images the Extension will not be triggered.
However, you could easily write your own Cloud Function that does the same thing but is triggered, for example, by a call to a specific URL (HTTPS cloud Function) or by creating a new document in a temporary Firestore Collection (background triggered CF).
This Cloud Function would execute the following steps:
Get all the files of your bucket, see the getFiles() method of the
Google Cloud Storage Node.js Client API. This method returns a
GetFilesResponse object which is an Array of File instances.
By looping over the array, for each file, check if the file has a
corresponding resized image in the bucket (depending on the way you
configured the Extension, the resized images may be in a specific
folder)
If a file does not have a corresponding resized image, execute the
same business logic of the Extension Cloud Function for this File.
There is an official Cloud Function sample which shows how to create a Cloud Storage triggered Firebase Function that will create resized thumbnails from uploaded images and upload them to the database URL, (see the last lines of index.js file)
Note : If you have a lot of files to treat, you should most probably work by batch, since there is a limit of 9 minutes for Cloud Function execution. Also, depending on the number of images to treat, you may need to increase the timeout value and/or the allocated memory of your Cloud Function, see https://firebase.google.com/docs/functions/manage-functions#set_timeout_and_memory_allocation
In case someone need it. This is how I resized existing image.
const functions = require("firebase-functions");
const axios = require("axios");
// FIX: the npm scope prefix is "@", not "#" (markdown mangling of "@google-cloud/storage").
const { Storage } = require("@google-cloud/storage");
const storage = new Storage();
// Don't forget to replace with your bucket name
const bucket = storage.bucket("projectid.appspot.com");
// Fetches all albums (id + album_cover URL) from the GraphQL API.
async function getAlbums() {
const endpoint = "https://api.mydomain.com/graphql";
const headers = {
"content-type": "application/json",
};
const graphqlQuery = {
query: `query Albums {
albums {
id
album_cover
}
}`,
};
const response = await axios({
url: endpoint,
method: "post",
headers: headers,
data: graphqlQuery,
});
if (response.errors) {
// FIX: the trailing comment had wrapped onto its own line ("if any"),
// which is a syntax error; keep the whole comment on one line.
functions.logger.error("API ERROR : ", response.errors); // errors if any
} else {
return response.data.data.albums;
}
}
// Extracts the bare file name from a download URL, stripping any query
// string (e.g. ".../photo.jpg?alt=media" -> "photo.jpg"). URL-encoded
// path separators (%2F) are decoded first so the real last segment is used.
function getFileName(url) {
var decoded = decodeURIComponent(url);
var filenameWithParam = decoded.substr(decoded.lastIndexOf("/") + 1);
var queryIndex = filenameWithParam.lastIndexOf("?");
// FIX: when there is no "?", lastIndexOf returns -1 and the original
// substr(0, -1) chopped the final character off the file name; return
// the name unchanged instead.
return queryIndex === -1 ? filenameWithParam : filenameWithParam.substr(0, queryIndex);
}
// Returns the substring after the last "/" in `url`; when the path
// contains no "/" the whole string comes back unchanged.
function getFileNameFromFirestore(url) {
return url.slice(url.lastIndexOf("/") + 1);
}
// Copies each album-cover file in the bucket to "<albumId>.<ext>" so the
// Resize Images extension's onFinalize trigger fires for existing images.
const triggerBucketEvent = async () => {
// FIX: use the promise form of getFiles() and await it — the original
// passed a callback inside an async function, so the function resolved
// immediately and the HTTP wrapper returned before any work ran.
const [files] = await bucket.getFiles({
prefix: "images/albums", // you can add a path prefix
autoPaginate: false,
});
const albums = await getAlbums();
await Promise.all(
files.map((file) => {
var fileName = getFileNameFromFirestore(file.name);
var result = albums.find((obj) => {
return getFileName(obj.album_cover) === fileName;
});
if (result) {
// Extension after the last "."; yields "" when there is no dot.
var file_ext = fileName.substr(
(Math.max(0, fileName.lastIndexOf(".")) || Infinity) + 1
);
var newFileName = result.id + "." + file_ext;
// Copy each file on thumbs directory with the different name.
// FIX: return the copy promise so Promise.all actually waits for it.
return file.copy("images/albums/" + newFileName);
}
functions.logger.info(file.name, " not found in album list!");
})
);
};
exports.manualGenerateResizedImage = functions.https.onRequest(async () => {
await triggerBucketEvent();
});

Why is async.map passing only the value of my JSON?

I have a function in node.js that looks like this:
exports.getAllFlights = function(getRequest) {
// this is the package from npm called "async"
async.map(clients, getFlight, function(err, results) {
getRequest(results);
});
}
The variable clients should be a JSON that looks like this:
{'A4Q': 'JZA8187', 'B7P': 'DAL2098' }.
I expected that the map function would pass the individual keys of the clients object to getFlight. However, it instead passes the values of each entry (e.g. 'DAL2098', 'JZA8187', and so on).
Is this the expected functionality? Is there a function in async that will do what I want?
The signature of getFlight is getFlight(identifier, callback). Identifier is what is currently messed up. It returns callback(null, rtn). Null reprsents the nonexistence of an error, rtn represents the JSON that my function produces.
Yes, that's the expected result. The documentation is not very clear but all iterating functions of async.js pass the values of the iterable, not the keys. There is the eachOf series of functions that pass both key and value. For example:
// eachOf passes both the value AND the key to the iterator callback.
async.eachOf(clients, function (value, key, callback) {
// process each client here
});
Unfortunately there is no mapOf.
If you don't mind not doing things in parallel you can use eachOfSeries:
var results = [];
// eachOfSeries processes one entry at a time, so pushes into `results`
// happen in iteration order; the final callback fires after the last entry.
async.eachOfSeries(clients, function (value, key, callback) {
// do what getFlight needs to do and append to results array
}, function(err) {
getRequest(results);
});
Another (IMHO better) workaround is to use proper arrays:
var clients = [{'A4Q': 'JZA8187'},{'B7P': 'DAL2098'}];
Then use your original logic. However, I'd prefer to use a structure like the following:
var clients = [
{key: 'A4Q', val: 'JZA8187'},
{key: 'B7P', val: 'DAL2098'}
];
First create a custom event. Attach a listener for return data. then process it.
var EventEmitter = require('events');
var myEmitter = new EventEmitter();
// FIX: register the listener BEFORE emitting — EventEmitter does not
// replay past events, so an emit with no listener attached is simply lost.
myEmitter.on('clients_data', (obj) => {
if (typeof obj !== 'undefined') {
// FIX: typos "contructor" and "lenth" made this empty-object check
// always evaluate to false.
if (obj.constructor === Object && Object.keys(obj).length == 0) {
console.log('empty');
} else {
for (var key in obj) {
var value = obj[key];
//do what you want here
}
}
}
});
myEmitter.emit('clients_data', {'"A4Q"': 'JZA8187'}); //emit your event where ever
Well, you need to format your clients object properly before you can use it with async.map(). Lodash _.map() can help you:
// Lodash _.map over an object yields one {key: value} wrapper per property.
var client_list = _.map(clients, function(value, key) {
var item = {};
item[key] = value;
return item;
});
After that, you will have an array like:
[ { A4Q: 'JZA8187' }, { B7P: 'DAL2098' } ]
Then, you can use async.map():
exports.getAllFlights = function(getRequest) {
// client_list is now a proper array, so async.map passes each
// {key: value} element to getFlight instead of just the values.
async.map(client_list, getFlight, function(err, results) {
getRequest(results);
});
};

How to create function to get data from WCF Rest then display to table

I have declared a function to call the WCF REST service in service.js; its URL returns JSON data. Then I created another function in entryCtrl.js to get the data and show it in the HTML.
service.js
(function (app) {
// Thin data service: wraps the WCF REST endpoint and returns the $http
// promise so controllers can chain .then() on it.
app.service("CRUD_AngularJs_RESTService", function ($http) {
this.getAllEntry = function () {
return $http.get("http://localhost:51458/ServiceRequest.svc/GetAllRequest/");
};
});
})(angular.module('model'));
entryCtrl.js
(function (app) {
'use strict';
app.controller('entryCtrl', entryCtrl);
// FIX: CRUD_AngularJs_RESTService and $log were referenced in the body
// but never injected (only $scope was), so both were undefined at
// runtime — the likely reason the table stayed empty.
entryCtrl.$inject = ['$scope', 'CRUD_AngularJs_RESTService', '$log'];
function entryCtrl($scope, CRUD_AngularJs_RESTService, $log) {
$scope.pageClass = 'page-entry';
$scope.GetAllRecords = function() {
var promiseGet = CRUD_AngularJs_RESTService.getAllEntry();
promiseGet.then(function (pl) { $scope.EntryData = pl.data; },
function (errorPl) {
$log.error('Some Error in Getting Records.', errorPl);
});
};
// FIX: the loader was defined but never invoked, so EntryData was
// never populated for the ng-repeat in the view.
$scope.GetAllRecords();
}
})(angular.module('model'));
view entry.html
<table data-ng-controller="entryCtrl">
<tbody data-ng-repeat="entry in EntryData">
<tr>
<td>{{entry.name}}</td>
<td>{{entry.telpon}}</td>
<td>{{entry.foobar}}</td>
</tr>
</tbody>
</table>
I don't get any errors, but my table doesn't show any data. How can I check whether the function works or not?
I just get a warning: "XMLHttpRequest on the main thread is deprecated because of its detrimental effects to the end user's experience." I don't know what it means.
The function GetAllRecords() is not set to the $scope. You need to set $scope.GetAllRecords = GetAllRecords before the call to $scope.GetAllRecords():
function entryCtrl($scope) {
$scope.pageClass = 'page-entry';
$scope.GetAllRecords = function() {
// NOTE(review): CRUD_AngularJs_RESTService and $log are used here but
// are not parameters of entryCtrl — they must also be added to the
// $inject list, or both calls below throw ReferenceError; verify.
var promiseGet = CRUD_AngularJs_RESTService.getAllEntry();
promiseGet.then(function (pl) { $scope.EntryData = pl.data },
function (errorPl) {
$log.error('Some Error in Getting Records.', errorPl);
});
}
// Kick off the initial load so EntryData is populated when the view binds.
$scope.GetAllRecords();
}
Alternatively, you can simply call GetAllRecords() directly, since you don't seem to need it in the $scope:
function entryCtrl($scope) {
$scope.pageClass = 'page-entry';
// IIFE: loads the records once at controller construction without
// exposing a GetAllRecords function on the $scope.
// NOTE(review): CRUD_AngularJs_RESTService and $log still need to be
// injected into entryCtrl for this to run.
(function() {
var promiseGet = CRUD_AngularJs_RESTService.getAllEntry();
promiseGet.then(function (pl) { $scope.EntryData = pl.data },
function (errorPl) {
$log.error('Some Error in Getting Records.', errorPl);
});
})();
}

AngularJS and Dreamfactory: $resource.save() not saving all the records in loop or in queue

I'm trying to save the data using $resource and for REST API using Dreamfactory. I do have seperate mysql table for multiple addresses as user might have multiple addresses.
I'm looping through multiple addresses and calling the API, but somehow the records are not stored in sequence, and sometimes some of the data is split or missing.
Here is my code:
Members.save($scope.member,
function () {
//console.log('POST',arguments );
console.log('POST', arguments[0].id);
// id of the member row just created; used as the FK for each address
$scope.mid = arguments[0].id;
if($scope.addresses)
{
$.each($scope.addresses,function(k,v){
//alert(k+"=>"+v);
// NOTE(review): each save is fired without waiting for the previous
// one to finish, so the server may receive them out of order —
// consistent with the "not in sequence" symptom described above.
Members_address.save({"member_id":$scope.mid,"address":v},
function () {
alert(k+"=>"+v);
console.log('POST',arguments );
}, function () {
console.error('POST', arguments);
}
);
});
// NOTE(review): this redirect runs before the address saves complete,
// which can abandon in-flight requests (the missing-address symptom).
window.location = "#/members";
}else
{
window.location = "#/members";
}
}, function () {
console.error('POST', arguments);
}
);
"Members" is factory to store data into members table and "Members_address" is a factory to store data in separate members_address table with member id.
It stores the addresses, but not in sequence, and sometimes it misses one of them.
Here are the factories:
// $resource factories for the two tables. NOTE(review): both use the same
// 'API_URL' placeholder; in practice each should target its own endpoint
// (members vs members_address).
App.factory('Members',['$resource',function ($resource) {
return $resource('API_URL', null, {'update': { method:'PUT' }
});
}])
App.factory('Members_address',['$resource',function ($resource) {
return $resource('API_URL', null, {'update': { method:'PUT' }
});
}])
Check this example at JSFiddle using recursion, I think this is what you want.
Ok. So, one fundamental thing is the thought process when you're working with data like this (especially with DreamFactory). You want to create a 'payload' of data to send to the server rather than trying to iteratively save records. Make an array of the records of a specific type that you want to save and then push them up to the server. In this case...you want to save a member (single object) and then addresses associated with that member (array of objects). The process is: 'send the member record to the server; then on save success, if there are addresses, create address objects with the member id, store them in an array, and send that to the server. Then on address save success, redirect.'
So here's the code I came up with. Hope it helps you out. Also, checkout $location for redirecting as opposed to window.location. And if your in need of the window object checkout $window. And...try only to use jQuery inside of directives to do DOM manipulation. AngularJS provides a lot of functionality for manipulating data.
// I'm assuming that your 'API_URL' contains the following information shown
// in this factory definition.
// Members: CRUD for the members table; Members_address: CRUD for the
// separate members_address table (one row per address).
App.factory('Members', ['$resource', function ($resource) {
return $resource('YOUR_DSP_URL/TABLE_NAME', null, {'update': { method:'PUT' } });
}]);
App.factory('Members_address', ['$resource', function ($resource) {
return $resource('YOUR_DSP_URL/TABLE_NAME', null, {'update': { method:'PUT' } });
}]);
// Saves one member, then batches all addresses (with the member id as FK)
// into a single payload and saves them in one request before redirecting.
App.controller('SomeCtrl', ['Members', 'Members_address', '$scope', '$location', function(Members, Members_address, $scope, $location) {
$scope.member = {}; // Member data object
$scope.addresses = []; // Array of address strings
// attach to UI button to trigger the save
$scope.saveMember = function() {
// FIX: the factory is injected as "Members", not "Member".
Members.save($scope.member).then(
// Handle Member save success
function(result) {
// Check for addresses
if ($scope.addresses) {
// Temporary var holding the batch payload for the server
var payload = [];
// Assemble address objects for insertion in db
angular.forEach($scope.addresses, function (_string) {
// create a temporary var to hold our address
var tempAddressObject = {};
// member id links the address row back to the member
tempAddressObject['member_id'] = $scope.member.id;
tempAddressObject['address'] = _string;
payload.push(tempAddressObject);
});
// Check that we have some records in our payload
if (payload.length > 0) {
// FIX: the address factory is "Members_address", not
// "Members.address".
Members_address.save(payload).then(
// Handle Success
function(result) {
console.log(result);
// redirect
$location.url('/members');
},
// handle Error
function (reject) {
// FIX: log the rejection we received — "error" was undefined.
console.log(reject);
// redirect
$location.url('/members');
}
)
}
}
},
// Handle Member save error
function(reject) {
console.log(reject);
}
);
}
}]);

Call multiple JSON data/files in one getJson request

I have this code:
var graphicDataUrl = 'graphic-data.json';
var webDataUrl = 'web-data.json';
var templateHtml = 'templating.html';
var viewG = $('#view-graphic');
var viewW = $('#view-web');
// NOTE(review): `dataUrls` is not defined anywhere above, and getJSON
// accepts only a single URL — which is the root of the question below.
$.getJSON(dataUrls, function(data) {
$.get(templateHtml, function(template) {
template = Handlebars.compile(template);
var example = template({ works: data });
viewG.html(example);
viewW.html(example);
});
});
What is the best way for call both webDataUrl and graphicDataUrl JSONs and use their data in order to display them in two different div (#viewG and #viewW)?
The best way is to do each one individually, and to handle error conditions:
// Fetch each JSON file independently so the page updates as soon as each
// request finishes, with per-request error handling.
$.getJSON(graphicDataUrl)
.then(function(data) {
// ...worked, put it in #view-graphic
})
.fail(function() {
// ...didn't work, handle it
});
// FIX: the second request mixed the callback form with the promise form
// ("$.getJSON(url, function(data) {" immediately followed by ".then(") —
// a syntax error; use the promise form alone, mirroring the first request.
$.getJSON(webDataUrl)
.then(function(data) {
// ...worked, put it in #view-web
})
.fail(function() {
// ...didn't work, handle it
});
That allows the requests to happen in parallel, and updates the page as soon as possible when each request completes.
If you want to run the requests in parallel but wait to update the page until they both complete, you can do that with $.when:
var graphicData, webData;
// Run both requests in parallel; $.when fires .then() once both settle.
// A variable left undefined below means that particular request failed.
$.when(
$.getJSON(graphicDataUrl, function(data) {
graphicData = data;
}),
$.getJSON(webDataUrl, function(data) {
webData = data;
})
).then(function() {
if (graphicData) {
// Worked, put graphicData in #view-graphic
}
else {
// Request for graphic data didn't work, handle it
}
if (webData) {
// Worked, put webData in #view-web
}
else {
// Request for web data didn't work, handle it
}
});
...but the page may seem less responsive since you're not updating when the first request comes back, but only when both do.
Just in case it is useful to anyone else who may come across this — and thanks to the Promise advances in jQuery — T.J. Crowder's answer can now be improved into one succinct and general function:
/**
* Load multiple JSON files.
*
* Example usage:
*
* jQuery.getMultipleJSON('file1.json', 'file2.json')
* .fail(function(jqxhr, textStatus, error){})
* .done(function(file1, file2){})
* ;
*/
jQuery.getMultipleJSON = function(){
// Fan out one getJSON per argument; $.when collects all of them.
return jQuery.when.apply(jQuery, jQuery.map(arguments, function(jsonfile){
return jQuery.getJSON(jsonfile);
})).then(function(){
var def = jQuery.Deferred();
// Each $.when result is a [data, textStatus, jqXHR] triple; keep only
// the data so done() receives one plain argument per file.
return def.resolve.apply(def, jQuery.map(arguments, function(response){
return response[0];
}));
});
};
However the point about not giving any feedback to the user — whilst waiting for the full load — is a good one. So for those that prefer to give responsive feedback, here's a slightly more complicated version that supports progress.
/**
* Load multiple json files, with progress.
*
* Example usage:
*
* jQuery.getMultipleJSON('file1.json', 'file2.json')
* .progress(function(percent, count, total){})
* .fail(function(jqxhr, textStatus, error){})
* .done(function(file1, file2){})
* ;
*/
jQuery.getMultipleJSON = function(){
var
num = 0, // completed-request counter for progress reporting
def = jQuery.Deferred(),
// One getJSON per argument; each completion notifies progress with
// (fractionComplete, completedCount, totalCount).
map = jQuery.map(arguments, function(jsonfile){
return jQuery.getJSON(jsonfile).then(function(){
def.notify(1/map.length * ++num, num, map.length);
return arguments;
});
})
;
jQuery.when.apply(jQuery, map)
.fail(function(){ def.rejectWith(def, arguments); })
.done(function(){
// Unwrap each [data, textStatus, jqXHR] triple down to just the data.
def.resolveWith(def, jQuery.map(arguments, function(response){
return response[0];
}));
})
;
return def;
};
This code is simple and you can access both response together in one function:
// Both requests run in parallel; done() fires only when both succeed.
// Each argument is a [data, textStatus, jqXHR] triple, hence the [0] index.
$.when(
$.getJSON(graphicDataUrl),
$.getJSON(webDataUrl)
).done(function(data1, data2) {
console.log(data1[0]);
console.log(data2[0]);
});