Setting callback on Polymer iron-ajax

Is it possible to set/change the callbacks on iron-ajax?
I have a variable number of requests to make with different callbacks for each.

As Scott has said in the comments, you should use a separate iron-ajax element for each request. However, if you want to make requests dynamically, I tend to use a dynamically created iron-request instead, if only because it returns a promise that I can carry around. Here is a little example:
var dataPromise = new Promise(function(accept, reject) {
  var sendOptions = {
    url: '/api/queries',
    method: 'POST',
    handleAs: 'json',
    headers: {'content-type': 'application/json'}
  };
  var body = {};
  body.name = name;
  if (useId) {
    body.id = parent.id;
  }
  if (useDates) {
    body.startdate = parent.startdate;
    body.enddate = parent.enddate;
  }
  sendOptions.body = body;
  var request = document.createElement('iron-request');
  request.send(sendOptions).then(function() {
    accept(request.response);
  }, reject);
});
and then I can pass the dataPromise around, and when I need the results of the request I do (in my case in a completely separate element) ...
open: function(dataPromise, params, x, y) {
  var self = this;
  this.x = x;
  this.y = y;
  dataPromise.then(function(data) {
    self.title = data.name;
    self.heading = data.heading;
    self.data = data.data;
    self.$.griddialog.open();
  });
  this.params = params;
},
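For completeness, a hypothetical caller could wire the two pieces together like this (the grid-dialog element name, params object, and coordinates are made up for illustration; only open()'s signature comes from the snippet above):

var dialog = document.querySelector('grid-dialog');
dialog.open(dataPromise, {source: 'queries'}, 100, 200);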

Related

Google Data Studio Community Connector getData() not working as expected

function getData(request) {
  try {
    var options = {
      'method': 'post',
      'contentType': 'application/json',
      'payload': JSON.stringify(request)
    };
    var response = UrlFetchApp.fetch(getDataUrl, options);
    var resData = JSON.parse(response.getContentText());
    return resData;
  } catch (e) {
    e = (typeof e === 'string') ? new Error(e) : e;
    Logger.log("Catch", e);
    throw e;
  }
}
The above is my getData() function.
My isAdminUser() returns true.
When I try to visualize my data, I get the following error
Data Set Configuration Error
Data Studio cannot connect to your data set.
There was an error requesting data from the community connector. Please report the issue to the provider of this community connector if this issue persists.
Error ID: 3d11b88b
(screenshot: https://i.stack.imgur.com/x3Hki.png)
The error ID changes every time I refresh the data, and I can't find any dictionary that maps the error ID to an actual error.
I tried debugging by logging the request parameter, response.getContentText(), and the resData variable to make sure my data is formatted correctly.
Following are the logs printed in the Stackdriver logs:
request
{configParams={/Personal config data/}, fields=[{name=LASTNAME}]}
response.getContentText()
{"schema":[{"name":"LASTNAME","dataType":"STRING"}],"rows":[{"values":["test"]},{"values":["test"]},{"values":["Dummy"]},{"values":["One"]},{"values":["Nagargoje"]},{"values":[""]},{"values":[""]},{"values":[""]},{"values":[""]},{"values":[""]}],"filtersApplied":false}
resData
{rows=[{values=[test]}, {values=[test]}, {values=[Dummy]},
{values=[One]}, {values=[Nagargoje]}, {values=[]}, {values=[]},
{values=[]}, {values=[]}, {values=[]}], filtersApplied=false,
schema=[{name=LASTNAME, dataType=STRING}]}
I am not sure what is wrong with my getData() function. The object that I am returning seems to match the structure given here: https://developers.google.com/datastudio/connector/reference#getdata
So there was no issue with my getData() function; the issue existed in the manifest file.
While searching for how to pass parameters via the URL, I had stumbled upon a field called dataStudio.useQueryConfig and added it to my manifest file with its value set to true.
With that flag set, Google Data Studio was expecting me to return a query config from getData(), but what I really wanted was the normal data response.
Anyway, I was able to debug it thanks to Matthias suggesting that I take a look at open-source implementations.
I implemented JSON Connect, which worked fine, so I logged what it was returning in getData() and used that format/structure in my code, but my connector still didn't work.
My next assumption was that maybe there was something wrong with my getSchema() return value, so I logged that as well and then copy-pasted the hard-coded values of both the getData() and getSchema() return variables from JSON Connect.
Even that didn't work, so my last bet was that something must be wrong with the manifest file; maybe the dummy links I had added were the issue. After carrying out a field-by-field comparison, I was finally able to get my community connector working.
This would have been easier to debug if the error messages were a bit more helpful and less generic.
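For anyone hitting the same wall, here is a rough sketch of the offending part of appsscript.json (only the useQueryConfig entry comes from my actual manifest; the other values are illustrative placeholders). Deleting the useQueryConfig entry restores the normal getData() data contract:

{
  "dataStudio": {
    "name": "My Community Connector",
    "logoUrl": "https://example.com/logo.png",
    "useQueryConfig": true
  }
}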
First: You can always check out the open-source implementations that others have done for custom Google Data Studio connectors. They are a great source of information. For more information, check out the documentation on Open Source Community Connectors.
Second: My implementation is for a time-tracking system and thus contains confidential, GDPR-relevant data, which is why I can't just give you my response messages. But I assembled this code. It contains authentication, an HTTP GET data fetch, and data conversions. An explanation is below the code. Again, check out the open-source connectors if you need further assistance.
var cc = DataStudioApp.createCommunityConnector();
const URL_DATA = 'https://www.myverysecretdomain.com/api';
const URL_PING = 'https://www.myverysecretdomain.com/ping';
const AUTH_USER = 'auth.user';
const AUTH_KEY = 'auth.key';
const JSON_TAG = 'user';
String.prototype.format = function() {
  // https://coderwall.com/p/flonoa/simple-string-format-in-javascript
  var a = this;
  for (var k in arguments) {
    a = a.replace("{" + k + "}", arguments[k]);
  }
  return a;
};
function httpGet(user, token, url, params) {
  try {
    // this depends on the URL you are connecting to
    var headers = {
      'ApiUser': user,
      'ApiToken': token,
      'User-Agent': 'my super freaky Google Data Studio connector'
    };
    var options = {
      headers: headers
    };
    if (params && Object.keys(params).length > 0) {
      var params_ = [];
      for (const [key, value] of Object.entries(params)) {
        var value_ = value;
        if (Array.isArray(value))
          value_ = value.join(',');
        params_.push('{0}={1}'.format(key, encodeURIComponent(value_)));
      }
      var query = params_.join('&');
      url = '{0}?{1}'.format(url, query);
    }
    var response = UrlFetchApp.fetch(url, options);
    return {
      code: response.getResponseCode(),
      json: JSON.parse(response.getContentText())
    };
  } catch (e) {
    throwConnectorError(e);
  }
}
function getCredentials() {
  var userProperties = PropertiesService.getUserProperties();
  return {
    username: userProperties.getProperty(AUTH_USER),
    token: userProperties.getProperty(AUTH_KEY)
  };
}

function validateCredentials(user, token) {
  if (!user || !token)
    return false;
  var response = httpGet(user, token, URL_PING);
  if (response.code == 200)
    console.log('API key for the user %s successfully validated', user);
  else
    console.error('API key for the user %s is invalid. Code: %s', user, response.code);
  return response;
}

function getAuthType() {
  var cc = DataStudioApp.createCommunityConnector();
  return cc.newAuthTypeResponse()
    .setAuthType(cc.AuthType.USER_TOKEN)
    .setHelpUrl('https://www.myverysecretdomain.com/index.html#authentication')
    .build();
}

function resetAuth() {
  var userProperties = PropertiesService.getUserProperties();
  userProperties.deleteProperty(AUTH_USER);
  userProperties.deleteProperty(AUTH_KEY);
  console.info('Credentials have been reset.');
}

function isAuthValid() {
  var credentials = getCredentials();
  if (credentials == null) {
    console.info('No credentials found.');
    return false;
  }
  var response = validateCredentials(credentials.username, credentials.token);
  return (response != null && response.code == 200);
}

function setCredentials(request) {
  var credentials = request.userToken;
  var response = validateCredentials(credentials.username, credentials.token);
  if (response == null || response.code != 200) return { errorCode: 'INVALID_CREDENTIALS' };
  var userProperties = PropertiesService.getUserProperties();
  userProperties.setProperty(AUTH_USER, credentials.username);
  userProperties.setProperty(AUTH_KEY, credentials.token);
  console.info('Credentials have been stored');
  return {
    errorCode: 'NONE'
  };
}

function throwConnectorError(text) {
  DataStudioApp.createCommunityConnector()
    .newUserError()
    .setDebugText(text)
    .setText(text)
    .throwException();
}
function getConfig(request) {
  // ToDo: handle request.languageCode for different languages being displayed
  console.log(request);
  var params = request.configParams;
  var config = cc.getConfig();
  // ToDo: add your config if necessary
  config.setDateRangeRequired(true);
  return config.build();
}

function getDimensions() {
  var types = cc.FieldType;
  return [
    {
      id: 'id',
      name: 'ID',
      type: types.NUMBER
    },
    {
      id: 'name',
      name: 'Name',
      isDefault: true,
      type: types.TEXT
    },
    {
      id: 'email',
      name: 'Email',
      type: types.TEXT
    }
  ];
}

function getMetrics() {
  return [];
}

function getFields(request) {
  Logger.log(request);
  var fields = cc.getFields();
  var dimensions = this.getDimensions();
  var metrics = this.getMetrics();
  dimensions.forEach(dimension => fields.newDimension().setId(dimension.id).setName(dimension.name).setType(dimension.type));
  metrics.forEach(metric => fields.newMetric().setId(metric.id).setName(metric.name).setType(metric.type).setAggregation(metric.aggregations));
  var defaultDimension = dimensions.find(field => field.hasOwnProperty('isDefault') && field.isDefault == true);
  var defaultMetric = metrics.find(field => field.hasOwnProperty('isDefault') && field.isDefault == true);
  if (defaultDimension)
    fields.setDefaultDimension(defaultDimension.id);
  if (defaultMetric)
    fields.setDefaultMetric(defaultMetric.id);
  return fields;
}

function getSchema(request) {
  var fields = getFields(request).build();
  return { schema: fields };
}
function convertValue(value, id) {
  // ToDo: add special conversion if necessary
  switch (id) {
    default:
      // value will be converted automatically
      return value[id];
  }
}

function entriesToDicts(schema, data, converter, tag) {
  return data.map(function(element) {
    var entry = element[tag];
    var row = {};
    schema.forEach(function(field) {
      // a built schema field exposes its ID as 'name'; the field has the
      // same name in the connector and the original data source
      var id = field.name;
      var value = converter(entry, id);
      // key the row by the UI field ID
      row[id] = value;
    });
    return row;
  });
}

function dictsToRows(requestedFields, rows) {
  return rows.reduce((result, row) => ([...result, {'values': requestedFields.reduce((values, field) => ([...values, row[field]]), [])}]), []);
}

function getParams(request) {
  var schema = this.getSchema();
  var params;
  if (request) {
    params = {};
    // ToDo: handle pagination={startRow=1.0, rowCount=100.0}
  } else {
    // preview only
    params = {
      limit: 20
    };
  }
  return params;
}
function getData(request) {
  Logger.log(request);
  var credentials = getCredentials();
  var schema = getSchema().schema;
  var params = getParams(request);
  var requestedFields; // fields structured as I want them (see above)
  var requestedSchema; // fields structured as Google expects them
  if (request) {
    // make sure the ordering of the requested fields is kept correct in the resulting data
    requestedFields = request.fields.filter(field => !field.forFilterOnly).map(field => field.name);
    requestedSchema = getFields(request).forIds(requestedFields);
  } else {
    // use all fields from schema
    requestedFields = schema.map(field => field.name);
    requestedSchema = getFields(request);
  }
  var filterPresent = request && request.dimensionsFilters;
  //var filter = ...
  if (filterPresent) {
    // ToDo: apply request filters on API level (before the API call) to minimize data retrieval from API (number of rows) and increase speed
    // see https://developers.google.com/datastudio/connector/filters
    // filter = ... // initialize filter
    // filter.preFilter(params); // low-level API filtering if possible
  }
  // get HTTP response; e.g. check the HTTP return code on response.code if necessary
  var response = httpGet(credentials.username, credentials.token, URL_DATA, params);
  // get JSON data from HTTP response
  var data = response.json;
  // convert the full dataset including all fields (the full schema); non-requested fields will be filtered out later on
  var rows = entriesToDicts(schema, data, convertValue, JSON_TAG);
  // match rows against filter (high-level filtering)
  //if (filter)
  //  rows = rows.filter(row => filter.match(row) == true);
  // remove non-requested fields
  var result = dictsToRows(requestedFields, rows);
  console.log('{0} rows received'.format(result.length));
  //console.log(result);
  return {
    schema: requestedSchema.build(),
    rows: result,
    filtersApplied: filterPresent ? true : false
  };
}
A sample request that filters for all users with names starting with J.
{
configParams={},
dateRange={
endDate=2020-05-14,
startDate=2020-04-17
},
fields=[
{name=name}
],
scriptParams={
lastRefresh=1589543208040
},
dimensionsFilters=[
[
{
values=[^J.*],
operator=REGEXP_EXACT_MATCH,
type=INCLUDE,
fieldName=name
}
]
]
}
The JSON data returned by the HTTP GET contains all fields (full schema).
[ { user:
{ id: 1,
name: 'Jane Doe',
email: 'jane#doe.com' } },
{ user:
{ id: 2,
name: 'John Doe',
email: 'john#doe.com' } }
]
Once the data is filtered and converted/transformed, you'll get this result, which is perfectly displayed by Google Data Studio:
{
filtersApplied=true,
schema=[
{
isDefault=true,
semantics={
semanticType=TEXT,
conceptType=DIMENSION
},
label=Name,
name=name,
dataType=STRING
}
],
rows=[
{values=[Jane Doe]},
{values=[John Doe]}
]
}
getData() should return data for only the requested fields. request.fields holds the list of all requested fields; limit your data to those fields only and then send the parsed data back.
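As a rough sketch of that advice (fetchAll() is a hypothetical stand-in for however you load your full records; getFields() is the helper from the answer above):

function getData(request) {
  // Names of the fields Data Studio actually asked for, in request order.
  var requestedFieldIds = request.fields.map(function(field) {
    return field.name;
  });
  var requestedFields = getFields(request).forIds(requestedFieldIds);
  // Build one row per record, keeping only the requested fields.
  var rows = fetchAll().map(function(record) {
    return {
      values: requestedFieldIds.map(function(id) {
        return record[id];
      })
    };
  });
  return {
    schema: requestedFields.build(),
    rows: rows
  };
}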

Get some word from a returned String - REST function

In that case I've already tried to get the returned JSON, but when I use JSON.parse and JSON.stringify, it returns undefined. If I don't parse it and leave only data = data.toString('utf8');, it returns:
!!xxxxxxxxxxxxxxxxxxxxxxxxxxxxx.rest.schema.CtLoginResp {error: null, sessionID: 6dMX4uGVurFdLRL+hW4F2kIW}
And I want the sessionID... but if I try to get it, it returns undefined; I tried JSON.parse and JSON.stringify.
My code:
var Client = require('./lib/node-rest-client').Client;
var client = new Client();
var dataLogin = {
  data: { "userName": "xxxxxxxxxxx", "password": "xxxxxxxxxxxxx", "platform": "xxxxxxx" },
  headers: { "Content-Type": "application/json" }
};
client.registerMethod("postMethod", "xxxxxxxxxxxxxxxxxxxx/login", "POST");
client.methods.postMethod(dataLogin, function (data, response) {
  // parsed response body as js object
  // console.log(data);
  // raw response
  if (Buffer.isBuffer(data)) { // if I remove the Buffer check, the return is 21 22 etc.
    data = data.toString('utf8'); // this returns everything, but as a string
    var outputTe = data;
    var res = outputTe.split(" ", 4);
    res = res[3].split("}", 1);
  }
  console.log(res);
});
If I don't use the Buffer check, the return is 21 34 56 etc.; but if I do use it, the return is the whole string from data.toString().
EDIT:
I tried using split, but it returns just the string "sessionID". The same code works fine in the W3Schools test editor, but not inside my code.
In this case, the response body isn't valid JSON (it's a serialized object dump), which is why JSON.parse doesn't give you anything useful. A regex can pull the session ID out instead:
client.methods.postMethod(dataLogin, function (data, response) {
  if (Buffer.isBuffer(data)) {
    data = data.toString('utf8');
    console.log(data);
    // capture everything after "sessionID: " up to the next ',' or '}'
    var re = /(sessionID: )([^,}]*)/g;
    var match = re.exec(data);
    var sessionid = match[2];
    console.log(sessionid);
    openRequest(numberOrigin);
  }
});

How to extract JSON data received in the controller into a String variable

Could you please let me know how to extract JSON data received in the controller into a string variable? Please see the attachment. Thanks.
$("#btn1").on("click", function () {
var i = new Array();
var j = 0;
$("#sl1").multiselect("getChecked").map(function () {
alert(this.value);
i.push(this.value);
//i[j] = this.value;
//j++;
}).get();
var postData = { values: i };
jQuery.ajaxSettings.traditional = true;
$.post('/TodoList/searchdata', postData, function (data) {
alert(data.Result);
});
//$.ajax({
// type: "POST",
// url: "/TodoList/searchdata",
// data: postData,
// success: function (data) {
// alert(data.Result);
// },
// dataType: "json",
// traditional: true
//});
});
Controller code:
public void searchdata(String[] values)
{
    //JavaScriptSerializer js = new JavaScriptSerializer();
    //List<String[][]> data = js.Deserialize<List<String[][]>>(i);
    Console.WriteLine(values);
}
You can use the Newtonsoft.Json library: https://www.nuget.org/packages/Newtonsoft.Json/
As mentioned in the link below, use it like this:
string json = @"{ 'Email': 'james@example.com', 'Active': true,
  'CreatedDate': '2013-01-20T00:00:00Z', 'Roles': [
    'User', 'Admin' ] }";
Account account = JsonConvert.DeserializeObject<Account>(json);
If you don't have a model, just use:
var model = JsonConvert.DeserializeObject(json);
Then check the link below:
http://www.newtonsoft.com/json/help/html/deserializeobject.htm
Try this:
JavaScriptSerializer js = new JavaScriptSerializer();
var data = js.Deserialize<Dictionary<string, List<string>>>(i);
Use this class:
public class JsonAttributeClass<T> where T : class, new()
{
    public static string EntityToJsonConvertor(T entity)
    {
        string json = JsonConvert.SerializeObject(entity);
        return json;
    }
    public static T JsonToEntityConvertor(string json)
    {
        var entity = JsonConvert.DeserializeObject<T>(json);
        return entity;
    }
}

Call multiple JSON data files in one getJSON request

I have this code:
var graphicDataUrl = 'graphic-data.json';
var webDataUrl = 'web-data.json';
var templateHtml = 'templating.html';
var viewG = $('#view-graphic');
var viewW = $('#view-web');
$.getJSON(dataUrls, function(data) {
  $.get(templateHtml, function(template) {
    template = Handlebars.compile(template);
    var example = template({ works: data });
    viewG.html(example);
    viewW.html(example);
  });
});
What is the best way to call both the webDataUrl and graphicDataUrl JSONs and use their data in order to display them in the two different divs (viewG and viewW)?
The best way is to do each one individually, and to handle error conditions:
$.getJSON(graphicDataUrl)
  .then(function(data) {
    // ...worked, put it in #view-graphic
  })
  .fail(function() {
    // ...didn't work, handle it
  });
$.getJSON(webDataUrl)
  .then(function(data) {
    // ...worked, put it in #view-web
  })
  .fail(function() {
    // ...didn't work, handle it
  });
That allows the requests to happen in parallel, and updates the page as soon as possible when each request completes.
If you want to run the requests in parallel but wait to update the page until they both complete, you can do that with $.when:
var graphicData, webData;
$.when(
  $.getJSON(graphicDataUrl, function(data) {
    graphicData = data;
  }),
  $.getJSON(webDataUrl, function(data) {
    webData = data;
  })
).then(function() {
  if (graphicData) {
    // Worked, put graphicData in #view-graphic
  }
  else {
    // Request for graphic data didn't work, handle it
  }
  if (webData) {
    // Worked, put webData in #view-web
  }
  else {
    // Request for web data didn't work, handle it
  }
});
...but the page may seem less responsive since you're not updating when the first request comes back, but only when both do.
Just in case it is useful to anyone else who may come across this — and thanks to the Promise advances in jQuery — T.J. Crowder's answer can now be improved into one succinct and general function:
/**
 * Load multiple JSON files.
 *
 * Example usage:
 *
 *   jQuery.getMultipleJSON('file1.json', 'file2.json')
 *     .fail(function(jqxhr, textStatus, error){})
 *     .done(function(file1, file2){})
 *   ;
 */
jQuery.getMultipleJSON = function(){
  return jQuery.when.apply(jQuery, jQuery.map(arguments, function(jsonfile){
    return jQuery.getJSON(jsonfile);
  })).then(function(){
    var def = jQuery.Deferred();
    return def.resolve.apply(def, jQuery.map(arguments, function(response){
      return response[0];
    }));
  });
};
However, the point about not giving any feedback to the user — whilst waiting for the full load — is a good one. So for those who prefer to give responsive feedback, here's a slightly more complicated version that supports progress.
/**
 * Load multiple JSON files, with progress.
 *
 * Example usage:
 *
 *   jQuery.getMultipleJSON('file1.json', 'file2.json')
 *     .progress(function(percent, count, total){})
 *     .fail(function(jqxhr, textStatus, error){})
 *     .done(function(file1, file2){})
 *   ;
 */
jQuery.getMultipleJSON = function(){
  var
    num = 0,
    def = jQuery.Deferred(),
    map = jQuery.map(arguments, function(jsonfile){
      return jQuery.getJSON(jsonfile).then(function(){
        def.notify(1/map.length * ++num, num, map.length);
        return arguments;
      });
    })
  ;
  jQuery.when.apply(jQuery, map)
    .fail(function(){ def.rejectWith(def, arguments); })
    .done(function(){
      def.resolveWith(def, jQuery.map(arguments, function(response){
        return response[0];
      }));
    })
  ;
  return def;
};
This code is simple and you can access both responses together in one callback:
$.when(
  $.getJSON(graphicDataUrl),
  $.getJSON(webDataUrl)
).done(function(data1, data2) {
  console.log(data1[0]);
  console.log(data2[0]);
});

Node.js - Can I store writeable streams as JSON in Redis?

I am still working on fully understanding streams in Node.js. If I create a writable stream, would I be able to store the stream object as JSON in Redis, and then access it later and continue writing to it (after JSON.parse)?
example:
var fs = require('fs');
var redis = require('redis');
var rclient = redis.createClient();
var streamName = fs.createWriteStream(upfilePath, streamopts);
streamName = JSON.stringify(streamName);
rclient.set('streamJSON', streamName);
....
var myNewdata = 'whatever';
rclient.get('streamJSON', function (err, streamJSON) {
  var recoveredStream = JSON.parse(streamJSON);
  recoveredStream.write(myNewdata, function (err, written, buffer) {
    // write successful??
  });
});
You can't store variable references in Redis. You would only need to store the filename, then reopen the stream with the 'a' flag, which allows you to append data to it.
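A minimal sketch of that approach, assuming the node_redis client from the question (the key name and file path are illustrative):

var fs = require('fs');
var redis = require('redis');
var rclient = redis.createClient();

// Store only the file path, not the stream object itself.
rclient.set('upload:path', '/tmp/upload.dat', function (err) {
  // ...later, possibly from another process:
  rclient.get('upload:path', function (err, path) {
    // The 'a' flag appends to the existing file instead of truncating it.
    var stream = fs.createWriteStream(path, { flags: 'a' });
    stream.write('more data');
    stream.end();
  });
});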
I thought this was a pretty interesting question, so I created this module that allows you to save the state of a stream and then use it later. But I don't see the point if you can just use the 'a' flag. It might be useful for ReadableStreams, though.
var fs = require('fs');
exports.stringify = function(stream) {
  var obj = {
    path: stream.path,
    writable: stream.writable,
    fd: stream.fd,
    options: {
      encoding: stream.encoding,
      mode: stream.mode
    }
  };
  if (stream.writable) {
    obj.bytesWritten = stream.bytesWritten;
  } else {
    obj.options.bufferSize = stream.bufferSize;
    obj.bytesRead = stream.bytesRead;
  }
  return JSON.stringify(obj);
};

exports.parse = function(json, callback) {
  var obj = JSON.parse(json);
  var stream;
  if (obj.writable) {
    obj.options.flags = 'a';
    stream = fs.createWriteStream(obj.path, obj.options);
    stream.bytesWritten = obj.bytesWritten;
  } else {
    stream = fs.createReadStream(obj.path, obj.options);
    stream.bytesRead = obj.bytesRead;
  }
  // if stream was already opened, wait until it is
  if (obj.fd !== null) {
    stream.on('open', function() {
      callback(null, stream);
    });
  } else {
    process.nextTick(function() {
      callback(null, stream);
    });
  }
  return stream;
};
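Hypothetical usage of that module together with Redis (the './stream-state' path and the key name are made up for the example):

var fs = require('fs');
var redis = require('redis');
var streamState = require('./stream-state'); // the module above
var rclient = redis.createClient();

var ws = fs.createWriteStream('/tmp/upfile');
ws.write('first chunk');
// Persist the stream's state, then recover an equivalent stream in append mode.
rclient.set('streamJSON', streamState.stringify(ws), function () {
  ws.end();
  rclient.get('streamJSON', function (err, json) {
    streamState.parse(json, function (err, stream) {
      stream.write('second chunk');
      stream.end();
    });
  });
});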