In ExtJS I am using a model and store for my grid. The problem is that sometimes the JSON will not match the model: it contains less information than the model defines. When this happens, ExtJS will not show any data in the grid. So I looked for a fix and found this:
Ext.define('App.Reader', {
    extend: 'Ext.data.reader.Json',
    extractData: function(root) {
        var me = this,
            values = [],
            records = [],
            Model = me.model,
            i = 0,
            length = root.length,
            idProp = me.getIdProperty(),
            node, id, record;
        if (!root.length && Ext.isObject(root)) {
            root = [root];
            length = 1;
        }
        for (; i < length; i++) {
            node = root[i];
            values = me.extractValues(node);
            id = me.getId(node);
            record = new Model(values, id, node);
            records.push(record);
            if (me.implicitIncludes) {
                me.readAssociated(record, node);
            }
        }
        return records;
    },
    extractValues: function(data) {
        var fields = this.getFields(),
            i = 0,
            length = fields.length,
            output = {},
            field, value;
        for (; i < length; i++) {
            field = fields[i];
            value = this.extractorFunctions[i](data);
            if (value === undefined) {
                Ext.iterate(fields, function(key, val) {
                    if (data[key] === undefined & i == val) {
                        console.log("Model field <" + key.name + "> does not exist in data/node.");
                        value = "INVALID OR MISSING FIELD NAME";
                        var p = 0;
                        for (var prop in data) {
                            if (p == i) {
                                if (data.hasOwnProperty(prop)) console.log("Instead of <" + key.name + "> we have <" + prop + "> with value <" + data[prop] + ">");
                            }
                            p++;
                        }
                    }
                }, this);
            }
            output[field.name] = value;
        }
        return output;
    }
});
var myReader = new App.Reader({
    type: 'json'
});
I found this online, but when I use it with ExtJS 4.1.1 I get an error in ext-all: TypeError: j is undefined.
Where should I look for a fix for this?
There's no need to do something complicated to solve this trivial problem. Read up on Ext.data.Model and Ext.data.Field, configure your Model properly and you're all set.
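A minimal sketch of what that looks like, assuming ExtJS 4.x (the model and field names here are hypothetical): give each field a type and, where it helps, a defaultValue, and the reader will fill in fields that are missing from the JSON instead of leaving the grid empty.
// Hypothetical model; adjust the names to your own data.
Ext.define('App.model.User', {
    extend: 'Ext.data.Model',
    fields: [
        { name: 'id',    type: 'int' },
        { name: 'name',  type: 'string' },
        // If the JSON omits this property, the record gets the defaultValue
        // instead of the load breaking.
        { name: 'email', type: 'string', defaultValue: 'n/a' }
    ]
});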
I have written code in an ASP.NET Core 6 controller and I am calling it from a view. The code sends a response to my view, but I don't know how to parse the data in the view.
Previously I was using JsonRequestBehavior.AllowGet, which is now deprecated in .NET 6. Please suggest a better approach for a JSON call that can return any dynamic object.
Here is my controller code:
public IActionResult GetAccountLevelAndCode(Int32 GroupAccountID, Int32 Companyid)
{
    string AccountLevels = ""; string Returnerror; string ReturnBranches;
    DataTable AL = new GetDataClass().GetAccountNoAndAndLevels(GroupAccountID, Companyid, out Returnerror);
    //a = (GLChartOFAccountModel)AL.Rows[0].ConvertDataRowToObject(a);
    string Sql = @"select cab.BranchID from GLChartOFAccount ca inner join GLChartOfAccountBranchDetail cab on ca.GLCAID=cab.GLCAID where cab.GLCAID=" + GroupAccountID;
    DataTable dt = new DataTable();
    dt = StaticClass.SelectAll(Sql).Tables[0];
    AccountLevels = JsonConvert.SerializeObject(AL);
    ReturnBranches = JsonConvert.SerializeObject(dt);
    Returnerror = JsonConvert.SerializeObject(Returnerror);
    return Json(new { AccountLevels, ReturnBranches, Returnerror });
}
Following is my view call and response allocation:
function GetAccountNoandLevel() {
    var DATA = { "GroupAccountID": $('#isParent').val(), Companyid: @Model.CompanyID }
    var execCode = true;
    $.ajax({
        async: false,
        type: "POST",
        url: "/GLChartOFAccount/GetAccountLevelAndCode",
        data: DATA,
        dataType: "json",
        success: function (data) {
            try {
                var c = JSON.parse(data.AccountLevels)
                var b = JSON.parse(data.ReturnBranches)
                var er = JSON.parse(data.Returnerror)
                if (b.length > 0) {
                    $("#BrachIDs option").each(function () {
                        var idParent = $(this).parent().attr("id");
                        this.disabled = true;
                    });
                    var dataarray = '';
                    for (var i = 0; i < b.length; i++) {
                        dataarray += b[i]["BranchID"] + ',';
                    }
                    dataarray = dataarray.replace(/,\s*$/, "");
                    var data = dataarray.split(",");
                    $("#BrachIDs").val(data);
                    $("#BrachIDs").change();
                    if (data.length > 0) {
                        for (var i = 0; i < data.length; i++) {
                            $("#BrachIDs option").filter("[value='" + data[i] + "']").attr('disabled', false);
                        }
                    }
                }
                else {
                    $("#BrachIDs option").each(function () {
                        var idParent = $(this).parent().attr("id");
                        this.disabled = false;
                    });
                    $("#BrachIDs option:selected").removeAttr("selected");
                }
                if (ShowErrorOK(er)) {
                    $('#GLCode').val('');
                } else {
                    RowToFillValues(c)
                }
            } catch (e) {
                //console.log(e + " GetAccountNoandLevel "); document.getElementById("DisplayErrorMessage").innerText = e.message; $('#btnTriggerMessage').click(); execCode = false; return false;
                console.log(e + " GetAccountNoandLevel "); console.log(e.message)
            }
        },
        error: function (err) {
            //console.log(err.responseText); document.getElementById("DisplayErrorMessage").innerText = "AJAX error in request: " + JSON.stringify(err.statusText + " " + err.status) + " GetAccountNoandLevel::Unable To Get Details"; $('#btnTriggerMessage').click(); execCode = false; return false;
            console.log("AJAX error in request: " + JSON.stringify(err.statusText + " " + err.status) + " GetAccountNoandLevel::Unable To Get Details")
        }
    });
    if (execCode) {
    }
}
The response data is showing undefined...
You don't need to serialize and parse manually; it will be done automatically:
return new JsonResult(new { AccountLevels=AL, ReturnBranches=dt, Returnerror= Returnerror });
and in the AJAX success callback:
var c = data.AccountLevels;
var b = data.ReturnBranches;
var er = data.Returnerror;
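For reference, a minimal sketch of the simplified success handler under that change (my own sketch, assuming the controller now returns the objects directly as above). With ASP.NET Core's default System.Text.Json settings the property names are camel-cased on the wire, so check the casing if a property comes back undefined:
success: function (data) {
    // property casing depends on the serializer settings (camelCase by default)
    var c = data.AccountLevels || data.accountLevels;
    var b = data.ReturnBranches || data.returnBranches;
    var er = data.Returnerror || data.returnerror;
    // ...continue with the existing branch/error handling on c, b and er...
}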
I am trying to flatten the properties (i.e. objects, arrays) of a JSON object, but keep the original property names the same and turn non-scalar properties into strings.
(I'm doing this because when I use the flat npm package, arrays/objects are flattened, but the object keys come out quoted, like 'task_status.0.data', and do not get stored into Azure Tables. If there is a way to fix that and de-string the keys, that would be an acceptable solution as well.)
Here's an example you could run on jsfiddle.net
var obj1 = {
    "studentId": "abc",
    "task_status": [
        {
            "status": "Current",
            "date": 516760078
        },
        {
            "status": "Late",
            "date": 1516414446
        }
    ],
    "student_plan": "n"
}
FlattenJson = function(obj) {
    keys = Object.keys(obj);
    var newObj = {};
    for (var i = 0; i < keys.length; i++) {
        var theType = typeof(obj[keys[i]]);
        console.log(theType);
        if (theType === 'array' || theType === 'object') {
            console.log(JSON.stringify(obj[keys[i]]));
            newObj[keys[i]] = "\"" + JSON.stringify(obj[keys[i]]) + "\"";
        }
        newObj[keys[i]] = obj[keys[i]];
    }
    return newObj;
}
var newObj1 = FlattenJson(obj1);
console.log(newObj1, obj1);
However, newObj1 contains the same original array instead of a string. How would I fix this?
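A minimal sketch of a direct fix for the loop above (my own, assuming the goal is simply to stringify arrays and plain objects): typeof never returns 'array' (arrays report 'object'), and the unconditional assignment right after the if overwrites the stringified value, so both need to change:
// Sketch: arrays report typeof 'object', and the stringified value
// must not be overwritten afterwards.
FlattenJson = function(obj) {
    var keys = Object.keys(obj);
    var newObj = {};
    for (var i = 0; i < keys.length; i++) {
        var value = obj[keys[i]];
        if (value !== null && typeof value === 'object') {
            // covers both arrays and plain objects
            newObj[keys[i]] = JSON.stringify(value);
        } else {
            newObj[keys[i]] = value;
        }
    }
    return newObj;
}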
UPDATED: Thanks to this you have a solution.
var obj1 = {
    "studentId": "abc",
    "task_status": [
        {
            "status": "Current",
            "date": 516760078
        },
        {
            "status": "Late",
            "date": 1516414446
        }
    ],
    "student_plan": "n"
}
function customToString(obj) {
    var str = '{';
    for (var prop in obj) {
        if (obj.hasOwnProperty(prop)) {
            str += prop + ':"' + obj[prop] + '",';
        }
    }
    return str.replace(/.$/, '') + '}';
}
var flattenObject = function(ob) {
    var toReturn = {};
    for (var i in ob) {
        if (!ob.hasOwnProperty(i)) continue;
        if ((typeof ob[i]) == 'object') {
            var flatObject = flattenObject(ob[i]);
            for (var x in flatObject) {
                if (!flatObject.hasOwnProperty(x)) continue;
                toReturn[i + '.' + x] = flatObject[x];
            }
        } else {
            toReturn[i] = ob[i];
        }
    }
    return toReturn;
};
var newObj1 = flattenObject(obj1);
console.log(newObj1, obj1);
document.write(customToString(newObj1));
When running our AngularJS app in IE11 everything looks great in the debugger, but when our app encodes the data as JSON to save to our database, we get bad results.
Our app obtains a record from our database, then some manipulation is done and then the data is saved back to the server from another model.
Here is the data I got back from the server in the setAttendanceGetSInfo() function below:
{"data":{"Start":"2014-10-16T19:36:00Z","End":"2014-10-16T19:37:00Z"},
This is the code used to "convert the data" to 3 properties in our model:
var setAttendanceGetSInfo = function (CourseId, PID) {
    return setAttendanceInfo(CourseId, PID)
        .then(function (result) {
            return $q.all([
                $http.get("../api/Axtra/getSInfo/" + model.event.Id),
                $http.get("../api/Axtra/GetStartAndEndDateTime/" + aRow.Rid)
            ]);
        }).then(function (result) {
            var r = result.data;
            var e = Date.fromISO(r.Start);
            var f = Date.fromISO(r.End);
            angular.extend(model.event, {
                examDate: new Date(e).toLocaleDateString(),
                examStartTime: (new Date(e)).toLocaleTimeString(),
                examEndTime: (new Date(f)).toLocaleTimeString()
            });
            return result.sInfo;
        });
};
fromISO is defined as:
(function () {
    var D = new Date('2011-06-02T09:34:29+02:00');
    if (!D || +D !== 1307000069000) {
        Date.fromISO = function (s) {
            var day, tz,
                rx = /^(\d{4}\-\d\d\-\d\d([tT ][\d:\.]*)?)([zZ]|([+\-])(\d\d):(\d\d))?$/,
                p = rx.exec(s) || [];
            if (p[1]) {
                day = p[1].split(/\D/);
                for (var i = 0, L = day.length; i < L; i++) {
                    day[i] = parseInt(day[i], 10) || 0;
                };
                day[1] -= 1;
                day = new Date(Date.UTC.apply(Date, day));
                if (!day.getDate()) return NaN;
                if (p[5]) {
                    tz = (parseInt(p[5], 10) * 60);
                    if (p[6]) tz += parseInt(p[6], 10);
                    if (p[4] == '+') tz *= -1;
                    if (tz) day.setUTCMinutes(day.getUTCMinutes() + tz);
                }
                return day;
            }
            return NaN;
        }
    }
    else {
        Date.fromISO = function (s) {
            return new Date(s);
        }
    }
})()
Take a look at the screenshot of the event model data:
But, if I eval the event model using JSON.stringify(model.event), I get this:
{\"examDate\":\"?10?/?16?/?2014\",\"examStartTime\":\"?2?:?44?:?00? ?PM\",\"examEndTime\":\"?2?:?44?:?00? ?PM\"}
And this is the JSON encoded data that actually got stored on the DB:
"examDate":"¿10¿/¿16¿/¿2014","examStartTime":"¿2¿:¿36¿:¿00¿ ¿PM","examEndTime":"¿2¿:¿37¿:¿00¿ ¿PM"
What is wrong here and how can I fix this? It works exactly as designed in Chrome and Firefox. I have not yet tested on Safari or earlier versions of IE.
The toJSON for the Date class isn't defined exactly the same across all browsers.
(You can see a related question here: Discrepancy in JSON.stringify of date values in different browsers.)
I would suspect that you have a custom toJSON added to the Date prototype, since your date string doesn't match the standard, and that is likely where your issue is. Alternatively, you can use the Date toJSON recommended in the above post to solve your issues.
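As an aside, and purely an assumption on my part: the stray characters wrapped around the digits are typically U+200E (left-to-right mark) characters that IE11 inserts into the output of toLocaleDateString() and toLocaleTimeString(). A crude workaround is to strip them before the values are serialized:
// Hypothetical helper: remove IE11's invisible left-to-right marks (U+200E)
// from locale-formatted date strings before they are stringified.
function stripLtrMarks(value) {
    return String(value).replace(/\u200E/g, '');
}
// e.g. examDate: stripLtrMarks(new Date(e).toLocaleDateString())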
First, I modified the fromISO function to this:
(function () {
    var D = new Date('2011-06-02T09:34:29+02:00');
    if (!D || +D !== 1307000069000) {
        Date.fromISO = function (s) {
            var D, M = [], hm, min = 0, d2,
                Rx = /([\d:]+)(\.\d+)?(Z|(([+\-])(\d\d):(\d\d))?)?$/;
            D = s.substring(0, 10).split('-');
            if (s.length > 11) {
                M = s.substring(11).match(Rx) || [];
                if (M[1]) D = D.concat(M[1].split(':'));
                if (M[2]) D.push(Math.round(M[2] * 1000)); // msec
            }
            for (var i = 0, L = D.length; i < L; i++) {
                D[i] = parseInt(D[i], 10);
            }
            D[1] -= 1;
            while (D.length < 6) D.push(0);
            if (M[4]) {
                min = parseInt(M[6]) * 60 + parseInt(M[7], 10); // timezone not UTC
                if (M[5] == '+') min *= -1;
            }
            try {
                d2 = Date.fromUTCArray(D);
                if (min) d2.setUTCMinutes(d2.getUTCMinutes() + min);
            }
            catch (er) {
                // bad input
            }
            return d2;
        }
    }
    else {
        Date.fromISO = function (s) {
            return new Date(s);
        }
    }
    Date.fromUTCArray = function (A) {
        var D = new Date;
        while (A.length < 7) A.push(0);
        var T = A.splice(3, A.length);
        D.setUTCFullYear.apply(D, A);
        D.setUTCHours.apply(D, T);
        return D;
    }
    // pad to two digits; this helper was missing from the original snippet
    function f(n) {
        return n < 10 ? '0' + n : n;
    }
    // define toJSON on the prototype so JSON.stringify actually uses it
    Date.prototype.toJSON = function (key) {
        return isFinite(this.valueOf()) ?
            this.getUTCFullYear() + '-' +
            f(this.getUTCMonth() + 1) + '-' +
            f(this.getUTCDate()) + 'T' +
            f(this.getUTCHours()) + ':' +
            f(this.getUTCMinutes()) + ':' +
            f(this.getUTCSeconds()) + 'Z' : null;
    };
})()
Then I added moment.js and formatted the dates when they get stored:
var SaveAffRow = function () {
    // make sure dates on coursedate and event are correct.
    var cd = model.a.courseDate;
    var ed = model.event.examDate;
    var est = model.event.examStartTime;
    var eet = model.event.examEndTime;
    model.a.courseDate = moment(cd).format("MM/DD/YYYY");
    model.event.examDate = moment(ed).format("MM/DD/YYYY");
    model.event.examStartTime = moment(est).format("MM/DD/YYYY hh:mm A");
    model.event.examEndTime = moment(eet).format("MM/DD/YYYY hh:mm A");
    affRow.DocumentsJson = angular.toJson({ a: model.a, event: model.event });
    var aff = {};
    if (affRow.Id != 0)
        aff = affRow.$update({ Id: affRow.Id });
    else
        aff = affRow.$save({ Id: affRow.Id });
    return aff;
};
and when they get read (just in case they are messed up already):
var setAttendanceGetSInfo = function (CourseId, PID) {
    return setAttendanceInfo(CourseId, PID)
        .then(function (result) {
            return $q.all([
                $http.get("../api/Axtra/getSInfo/" + model.event.Id),
                $http.get("../api/Axtra/GetStartAndEndDateTime/" + aRow.Rid)
            ]);
        }).then(function (result) {
            var r = result.data;
            var e = Date.fromISO(r.Start);
            var f = Date.fromISO(r.End);
            angular.extend(model.event, {
                examDate: moment(e).format("MM/DD/YYYY"),
                examStartTime: moment(e).format("MM/DD/YYYY hh:mm A"),
                examEndTime: moment(f).format("MM/DD/YYYY hh:mm A")
            });
            return result.sInfo;
        });
};
Can anyone please tell me how to add a count suffix to duplicate values in an Angular JSON array?
My actual array is given below:
$scope.datas.resultsOrder =['Data1','Data2','Data3','Data3','Data4','Data4'];
In the above array, Data3 and Data4 each repeat twice, so I need them to come out as Data3_1, Data3_2, Data4_1, Data4_2 within that array, as shown below:
$scope.datas.resultsOrder =['Data1','Data2','Data3_1',
'Data3_2','Data4_1','Data4_2'];
Also, the values within that array are dynamic, not static.
Can anyone please suggest a solution for this?
I like UnderscoreJS for these kinds of problems. In UnderscoreJS you can do something like this:
function uniq(array) {
    var grouped = _.groupBy(array);
    return _.reduce(grouped, function(result, x) {
        if (x.length > 1) {
            _.each(x, function(val, key) {
                result.push(val + '_' + (key + 1));
            });
        } else {
            result.push(x[0]);
        }
        return result;
    }, []);
}

uniq(['Data1','Data2','Data3','Data3','Data4','Data4']);
// ["Data1", "Data2", "Data3_1", "Data3_2", "Data4_1", "Data4_2"]
You can do this:
function transform(arr) {
    var c = {};
    for (var i = 0; i < arr.length; i++) {
        var ar = arr[i];
        if (!(ar in c)) {
            c[ar] = 0;
        }
        c[ar]++;
    }
    var res = [];
    for (var d in c) {
        if (c.hasOwnProperty(d)) {
            var l = c[d];
            if (l === 1) {
                res.push(d);
                continue;
            }
            for (var i = 0; i < l; i++) {
                res.push(d + '_' + (i + 1));
            }
        }
    }
    return res;
}

$scope.datas.resultsOrder = transform(passTheArrayHere);
Note: No guarantee for order.
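If the original order matters, a two-pass variant (a sketch of my own, not part of the answer above) counts the occurrences first and then renames duplicates in a second pass over the input array:
// Sketch: count occurrences first, then rename duplicates in input order.
function transformKeepOrder(arr) {
    var counts = {}, seen = {}, res = [];
    for (var i = 0; i < arr.length; i++) {
        counts[arr[i]] = (counts[arr[i]] || 0) + 1;
    }
    for (var j = 0; j < arr.length; j++) {
        var v = arr[j];
        if (counts[v] === 1) {
            res.push(v);
        } else {
            seen[v] = (seen[v] || 0) + 1;
            res.push(v + '_' + seen[v]);
        }
    }
    return res;
}
// transformKeepOrder(['Data1','Data2','Data3','Data3','Data4','Data4'])
// -> ["Data1", "Data2", "Data3_1", "Data3_2", "Data4_1", "Data4_2"]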
Is there any way to increase chrome.storage.sync.QUOTA_BYTES_PER_ITEM?
For me, the default 4096 bytes is a little bit short.
I tried to execute
chrome.storage.sync.QUOTA_BYTES_PER_ITEM = 8192;
However, it seems that the actual limit doesn't change.
How can I do this?
No, QUOTA_BYTES_PER_ITEM is there for reference only; it is not a settable value. You could use the value of QUOTA_BYTES_PER_ITEM to split an item up into multiple items, though:
function syncStore(key, objectToStore, callback) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};

    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;

        // since the key uses up some per-item quota, see how much is left for the value
        // also trim off 2 for quotes added by storage-time `stringify`
        var valueLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;

        // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
        var segment = jsonstr.substr(0, valueLength);
        while (JSON.stringify(segment).length > valueLength)
            segment = jsonstr.substr(0, --valueLength);

        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }
    // store all the chunks
    chrome.storage.sync.set(storageObj, callback);
}
Then write an analogous fetch function that fetches by key and glues the object back together.
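For reference, a sketch of what that fetch function might look like, assuming the `key_i` naming used by syncStore above (the helper name syncFetch is my own):
// Sketch: read chunks key_0, key_1, ... in order and reassemble the object.
function syncFetch(key, callback) {
    chrome.storage.sync.get(null, function (items) {
        var jsonstr = '';
        var i = 0;
        // concatenate consecutive chunks until one is missing
        while (items[key + "_" + i] !== undefined) {
            jsonstr += items[key + "_" + i];
            i++;
        }
        callback(jsonstr.length > 0 ? JSON.parse(jsonstr) : undefined);
    });
}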
Just a modified version of @apsilliers' answer:
function syncStore(key, objectToStore) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};

    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;

        // since the key uses up some per-item quota, see how much is left for the value
        // also trim off 2 for quotes added by storage-time `stringify`
        const maxLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
        var valueLength = jsonstr.length;
        if (valueLength > maxLength) {
            valueLength = maxLength;
        }

        // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
        // max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
        var segment = jsonstr.substr(0, valueLength);
        for (let i = 0; i < chrome.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
            const jsonLength = JSON.stringify(segment).length;
            if (jsonLength > maxLength) {
                segment = jsonstr.substr(0, --valueLength);
            } else {
                break;
            }
        }

        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }
    // store all the chunks (as in the original answer)
    chrome.storage.sync.set(storageObj);
}
Also, a function to read each partition and merge them again:
function syncGet(key, callback) {
    chrome.storage.sync.get(key, (data) => {
        console.log(data[key]);
        console.log(typeof data[key]);
        if (data != undefined && data != "undefined" && data != {} && data[key] != undefined && data[key] != "undefined") {
            const keyArr = new Array();
            for (let i = 0; i <= data[key].count; i++) {
                keyArr.push(`${data[key].prefix}${i}`)
            }
            chrome.storage.sync.get(keyArr, (items) => {
                console.log(data)
                const keys = Object.keys(items);
                const length = keys.length;
                let results = "";
                if (length > 0) {
                    const sepPos = keys[0].lastIndexOf("_");
                    const prefix = keys[0].substring(0, sepPos);
                    for (let x = 0; x < length; x++) {
                        results += items[`${prefix}_${x}`];
                    }
                    callback(JSON.parse(results));
                    return;
                }
                callback(undefined);
            });
        } else {
            callback(undefined);
        }
    });
}
I tested it and it works for my case.
This is an improved version of @uncle bob's functions, working with Manifest V3 (you can use it just like the normal sync.set or sync.get functions).
NOTE: it only works with JSON values (arrays and objects), since a plain string shouldn't be that long.
let browserServices;
if (typeof browser === "undefined") {
    browserServices = chrome;
} else {
    browserServices = browser;
}

function syncSet(obj = {}) {
    return new Promise((resolve, reject) => {
        var storageObj = {};
        for (let u = 0; u < Object.keys(obj).length; u++) {
            const key = Object.keys(obj)[u];
            const objectToStore = obj[key]
            var jsonstr = JSON.stringify(objectToStore);
            var i = 0;

            // split jsonstr into chunks and store them in an object indexed by `key_i`
            while (jsonstr.length > 0) {
                var index = key + "USEDTOSEPERATE" + i++;

                // since the key uses up some per-item quota, see how much is left for the value
                // also trim off 2 for quotes added by storage-time `stringify`
                const maxLength = browserServices.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
                var valueLength = jsonstr.length;
                if (valueLength > maxLength) {
                    valueLength = maxLength;
                }

                // trim down segment so it will be small enough even when run through `JSON.stringify` again at storage time
                // max try is QUOTA_BYTES_PER_ITEM to avoid infinite loop
                var segment = jsonstr.substring(0, valueLength);
                var jsonLength = JSON.stringify(segment).length;
                segment = jsonstr.substring(0, valueLength = (valueLength - (jsonLength - maxLength) - 1));
                for (let i = 0; i < browserServices.storage.sync.QUOTA_BYTES_PER_ITEM; i++) {
                    jsonLength = JSON.stringify(segment).length;
                    if (jsonLength > maxLength) {
                        segment = jsonstr.substring(0, --valueLength);
                    } else {
                        break;
                    }
                }

                storageObj[index] = segment;
                jsonstr = jsonstr.substring(valueLength, Infinity);
            }
        }
        chrome.storage.sync.set(storageObj).then(() => {
            resolve()
        })
    })
}
function syncGet(uniqueKeys = []) {
    return new Promise((resolve, reject) => {
        browserServices.storage.sync.get(null).then((data) => {
            const keyArr = Object.keys(data).filter(e => uniqueKeys.filter(j => e.indexOf(j) == 0).length > 0)
            browserServices.storage.sync.get(keyArr).then((items) => {
                var results = {};
                for (let i = 0; i < uniqueKeys.length; i++) {
                    const uniqueKey = uniqueKeys[i];
                    const keysFiltered = keyArr.filter(e => e.split("USEDTOSEPERATE")[0] == uniqueKey)
                    if (keysFiltered.length > 0) {
                        results[uniqueKey] = ""
                        for (let x = 0; x < keysFiltered.length; x++) {
                            results[uniqueKey] += items[`${keysFiltered[x]}`];
                        }
                        results[uniqueKey] = JSON.parse(results[uniqueKey])
                    }
                }
                resolve(results)
            });
        });
    })
}
Example usage:
syncSet({
    "keyTest": ["a lot of text"],
    "keyTest1": ["a lot of text"]
});
syncGet(["keyTest","keyTest1"]).then(results=>console.log(results))
// {keyTest:["a lot of text"],keyTest1:["a lot of text"]}