I am pulling data from a .csv file generated after a URL request with UrlFetchApp.fetch and storing the data with Logger.
Is there a way to write each row of the fetched data into a separate row of a spreadsheet?
I did this quite a while ago; it's how I import website visitor logs for review. I don't use commas as the delimiter because they show up in the data too often; I use three tildes ('~~~') instead, and the lines are separated with a line feed ('\n').
Basically I split the data into lines, split each line into an array of fields, and feed them into the sheet one line at a time. I usually deal with 20 or 30 files of about 100 KB or less, and I find that they load very quickly.
function importData1(myFolderID, myFolderName, myFileName) {
  // Default values if arguments were not supplied
  var myFolderID = typeof(myFolderID) !== 'undefined' ? myFolderID : 'FolderID';
  var myFileName = typeof(myFileName) !== 'undefined' ? myFileName : '';
  if (myFileName && myFolderID) {
    var fi = DriveApp.getFolderById(myFolderID).getFilesByName(myFileName); // Selected IPLogYYMMDD.txt file
    var ssid = SpreadsheetApp.getActive().getId();
    var ss = SpreadsheetApp.openById(ssid);
    if (fi.hasNext()) { // proceed if the file exists in the IPlogs folder
      var file = fi.next();
      var data = file.getBlob().getDataAsString();
      var lines = data.split('\n');
      var newsheet = ss.insertSheet(myFolderName + '/' + myFileName);
      var j = 0;
      for (var i = 0; i < lines.length; i++) {
        var fields = lines[i].split('~~~');
        if (fields.length >= 8) { // there are supposed to be 8 or 9 fields
          Logger.log('i=' + i + ' fields.length=' + fields.length);
          newsheet.getRange(j + 1, 1, 1, fields.length).setValues([fields]);
          j = j + 1;
        }
      }
    }
  } else {
    displayStatus('Error Importing Data', 'Either Folder or File not found in importData1');
  }
}
Most of the variables are easy to figure out, and you'll probably be able to adapt it to what you need. There will probably be several alternative answers for you to choose from.
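Since the original question is about a CSV fetched with UrlFetchApp, a simpler variant of the same idea is to let Utilities.parseCsv do the splitting and write all rows with a single setValues call. This is only a rough sketch; the URL and target sheet are placeholders, not anything from the question:

// Rough sketch (untested): fetch a CSV and write every row to the active sheet.
// 'https://example.com/report.csv' is a placeholder URL.
function importCsvFromUrl() {
  var csv = UrlFetchApp.fetch('https://example.com/report.csv').getContentText();
  var rows = Utilities.parseCsv(csv); // 2D array: one inner array per CSV row
  Logger.log(rows.length + ' rows fetched');
  // setValues needs every row to have the same length; pad or trim first if the CSV is ragged.
  var sheet = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
  sheet.getRange(1, 1, rows.length, rows[0].length).setValues(rows);
}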
I am trying to retrieve Firebase data and then insert it into a table using the following code.
My output page only shows the random keys from my JSON data; the console logs show the keys as well as the data. How can I put each random key together with its data in the table?
This is my error message:
DataTables warning: table id=example - Requested unknown parameter '1' for row 0.
$(document).ready(function() {
  var rootRef = firebase.database().ref().child("Orders/");
  var table = $('#example').DataTable();

  rootRef.on("child_added", snap => {
    var dataSet = [snap.key, snap.val().Nombre];
    table.rows.add([dataSet]).draw();
    console.log(snap.key);
    console.log(snap.val());
  });
});
In your dataSet variable, add all the data from Firebase that you want to put into the DataTable; it works:
rootRef.on("child_added", snap => {
  var dataSet = [snap.child("name").val(), snap.child("email").val(), snap.child("blood").val(), snap.child("Phone").val(), snap.child("Location").val()];
  table.rows.add([dataSet]).draw();
});
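The "Requested unknown parameter" warning generally means a row array has fewer entries than the table has columns. A small sketch of keeping the two in sync; the column titles here are placeholders, not from the original markup:

// Sketch: define the DataTable with exactly as many columns as each row array will hold.
var table = $('#example').DataTable({
  columns: [
    { title: "Key" },    // matches dataSet[0]
    { title: "Nombre" }  // matches dataSet[1]
  ]
});

rootRef.on("child_added", snap => {
  // One entry per column defined above
  table.rows.add([[snap.key, snap.val().Nombre]]).draw();
});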
Change:
var dataSet = [snap.key, snap.val().Nombre];
To:
var dataSet = [snap.child("name").val(), snap.val().Nombre];
var rootRef = firebase.database().ref().child("Topics");

rootRef.on("child_added", snap => {
  var topic_name = snap.child("topic_name").val();
  var uid = snap.child("topic_id").val();
  var downloadURL = snap.child("downloadURL").val();
  $("#tableHeading").append("<tr><td>" + uid + "</td><td>" + topic_name + "</td><td><img src='" + downloadURL + "' height='80px' width='80px'/></td><td><button>action</button></td></tr>");
});
I am trying to get the following to work: a JSON payload is sent to my Google Script, and I want to get specific values from that JSON message and store them in specific rows of one specific Google Spreadsheet. So far so good, this is what I have:
function doPost(response) {
  var sheets = SpreadsheetApp.openById('MY SHEET ID');
  var dataAll = JSON.parse(response.getContentText());
  var nR = getNextRow(sheets) + 1;

  // RECORD DATA IN SPREADSHEET
  sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
  sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(dataAll);
}
function getNextRow(sheets) {
  var timestamps = sheets.getRangeByName("timestamp").getValues();
  for (i in timestamps) {
    if (timestamps[i][0] == "") {
      return Number(i);
    }
  }
}
It should store the response and put it into the first blank cell of the range "timestamp", but nothing happens at this point.
This is the JSON body from JIRA:
{"timestamp":1483576902984,"webhookEvent":"jira:issue_created","issue_event_type_name":"issue_created","user":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"issue":{"id":"10057","self":"https://xxx.atlassian.net/rest/api/2/issue/10057","key":"TA-58","fields":{"issuetype":{"self":"https://xxx.atlassian.net/rest/api/2/issuetype/10104","id":"10104","description":"A problem which impairs or prevents the functions of the product.","iconUrl":"https://xxx.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10303&avatarType=issuetype","name":"Bug","subtask":false,"avatarId":10303},"timespent":null,"project":{"self":"https://xxx.atlassian.net/rest/api/2/project/10000","id":"10000","key":"TA","name":"Test Area","avatarUrls":{"48x48":"https://xxx.atlassian.net/secure/projectavatar?avatarId=10324","24x24":"https://xxx.atlassian.net/secure/projectavatar?size=small&avatarId=10324","16x16":"https://xxx.atlassian.net/secure/projectavatar?size=xsmall&avatarId=10324","32x32":"https://xxx.atlassian.net/secure/projectavatar?size=medium&avatarId=10324"}},"customfield_10110":null,"fixVersions":[],"customfield_10111":null,"aggregatetimespent":null,"customfield_10112":"Not started","resolution":null,"customfield_10113":null,"customfield_10114":null,"customfield_10104":null,"customfield_10105":null,"customfield_10106":null,"customfield_10107":null,"customfield_10108":null,"customfield_10109":null,"resolutiondate":null,"workratio":-1,"lastViewed":null,"watches":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/watchers","watchCount":0,"isWatching":false},"created":"2017-01-05T01:41:42.903+0100","priority":{"self":"https://xxx.atlassian.net/rest/api/2/priority/3","iconUrl":"https://xxx.atlassian.net/images/icons/priorities/medium.svg","name":"Medium","id":"3"},"customfield_10100":null,"customfield_10101":null,"customfield_10102":null,"customfield_10103":null,"labels":[],"timeestimate":null,"aggregatetimeoriginalestimate":null,"versions":[],"issuelinks":[],"assignee":null,"updated":"2017-01-05T01:41:42.903+0100","status":{"self":"https://xxx.atlassian.net/rest/api/2/status/10000","description":"","iconUrl":"https://xxx.atlassian.net/","name":"To Do","id":"10000","statusCategory":{"self":"https://xxx.atlassian.net/rest/api/2/statuscategory/2","id":2,"key":"new","colorName":"blue-gray","name":"To Do"}},"components":[],"timeoriginalestimate":null,"description":"super alles neu","timetracking":{},"customfield_10005":null,"attachment":[],"aggregatetimeestimate":null,"summary":"super alles 
neu","creator":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"subtasks":[],"reporter":{"self":"https://xxx.atlassian.net/rest/api/2/user?username=admin","name":"admin","key":"admin","emailAddress":"test#mail.at","avatarUrls":{"48x48":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=48","24x24":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=24","16x16":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=16","32x32":"https://secure.gravatar.com/avatar/3d238d8be45bd26982fa09ae2f891c3f?d=mm&s=32"},"displayName":"Max Mustermann [Administrator]","active":true,"timeZone":"Europe/Berlin"},"customfield_10000":"{}","aggregateprogress":{"progress":0,"total":0},"customfield_10001":null,"customfield_10115":null,"customfield_10116":"0|i0005r:","environment":null,"duedate":null,"progress":{"progress":0,"total":0},"comment":{"comments":[],"maxResults":0,"total":0,"startAt":0},"votes":{"self":"https://xxx.atlassian.net/rest/api/2/issue/TA-58/votes","votes":0,"hasVoted":false},"worklog":{"startAt":0,"maxResults":20,"total":0,"worklogs":[]}}}}
However, I don't want to have the whole JSON in my cell; I only want specific objects/IDs from within the JSON. How do I access them?
After tons of research, this is a solution that works for me (in my case):
function doPost(response) {
  var sheets = SpreadsheetApp.openById('SHEET_ID');

  // retrieve data from the JIRA payload and store it in "data"
  var json = response.postData.contents;
  var data = JSON.parse(json);

  // index values from "data" and store them in separate variables,
  // for example:
  var ticket_id = data.issue.key;
  var priority_name = data.issue.fields.priority.name;
  var summary = data.issue.fields.summary;

  // ... then write the values into the sheet (see the sketch below)
}
These two lines:
var json = response.postData.contents;
var data = JSON.parse(json);
made it possible to read the body and index all the specific parameters I want.
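To actually get those values onto the sheet, the named-range pattern from the question can be reused. A minimal sketch, assuming named ranges 'timestamp', 'ticket_id', 'priority' and 'summary' exist (only the first two appear in the original question; the others are illustrative):

// Sketch: append the extracted values to the next free row of each named range.
// getNextRow() is the helper from the question above.
var nR = getNextRow(sheets) + 1;
sheets.getRangeByName('timestamp').getCell(nR, 1).setValue(new Date());
sheets.getRangeByName('ticket_id').getCell(nR, 1).setValue(ticket_id);
sheets.getRangeByName('priority').getCell(nR, 1).setValue(priority_name); // assumed named range
sheets.getRangeByName('summary').getCell(nR, 1).setValue(summary);        // assumed named range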
Here is an example:
/*
 * webhookHandler: JIRA webhook callback function
 */
function webhookHandler(response) {
  // parse the POSTed JSON body into an object
  var data = JSON.parse(response.postData.contents);
  // logs out data in the Apps Script execution log
  console.log(data);
  var spreadsheet = SpreadsheetApp.openById("<spreadsheet id>");
  var cellRange = spreadsheet.getRangeByName("<some range name>");
  var cell = cellRange.getCell(1 /*row index, 1-based*/, 1 /*column index, 1-based*/);
  cell.setValue(data.issue.key /*index the JSON object from the webhook*/);
}
UrlFetchApp Documentation
SpreadsheetApp Documentation
Basically I have a set of files that I process with markdown and the like. After this initial processing, I'd like to split the stream into two:
First, a 1:1 mapping with additional processing such as layout
Second, mapping all the files into one, like an index, without the layouts applied above
Is it OK to save the stream into a variable and just keep piping? Here's my current task:
gulp.task('default', function() {
  var entries = gulp.src('./log/*.md')
    .pipe(frontMatter())
    .pipe(markdown());

  var templated = entries
    .pipe(applyTemplate())
    .pipe(gulp.dest('./build/log'));

  var index = entries
    .pipe(index())
    .pipe(applyIndexTemplate())
    .pipe(gulp.dest('./build'));

  return merge(templated, index);
});
I could use lazypipe and/or just construct the pipe multiple times, but is there another way?
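For reference, the lazypipe route mentioned above would look roughly like this; it is only an untested sketch reusing the same plugin names from the task, and it reads the sources twice instead of splitting one stream:

// Sketch: build the shared part of the pipeline once with lazypipe,
// then instantiate it separately for each destination.
var lazypipe = require('lazypipe');

var preprocess = lazypipe()
  .pipe(frontMatter)   // note: plugin functions are passed uninvoked to lazypipe
  .pipe(markdown);

gulp.task('default', function() {
  var templated = gulp.src('./log/*.md')
    .pipe(preprocess())
    .pipe(applyTemplate())
    .pipe(gulp.dest('./build/log'));

  var indexed = gulp.src('./log/*.md')
    .pipe(preprocess())
    .pipe(index())
    .pipe(applyIndexTemplate())
    .pipe(gulp.dest('./build'));

  return merge(templated, indexed);
});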
According to the Node.js docs, "multiple destinations can be piped to safely" and the original example is correct:
var entries = gulp.src('./log/*.md')
  .pipe(frontMatter())
  .pipe(markdown());

var templated = entries
  .pipe(applyTemplate())
  .pipe(gulp.dest('./build/log'));

var index = entries
  .pipe(index())
  .pipe(applyIndexTemplate())
  .pipe(gulp.dest('./build'));

return merge(templated, index);
// Alternative: clone the stream with gulp-clone so each branch works on its own
// copies of the files, then merge the two branches with event-stream.
var gulpClone = require("gulp-clone");
var eventStream = require('event-stream');

var entries = gulp.src('./log/*.md')
  .pipe(frontMatter())
  .pipe(markdown());

var templated = entries
  .pipe(gulpClone())
  .pipe(applyTemplate())
  .pipe(gulp.dest('./build/log'));

var index = entries
  .pipe(gulpClone())
  .pipe(index())
  .pipe(applyIndexTemplate())
  .pipe(gulp.dest('./build'));

return eventStream.merge(templated, index);
I have a FlexTable with checkboxes in the first cell of each row; when a checkbox is checked, the data from that FlexTable row is collected into a variable. Now I need to create a document containing a table built from the data in that variable. I tried to store the value as a string in a Hidden widget, but it doesn't work and I can't figure out how to do it.
All my code (although it is not really mine; it is almost half @Sergeinsas's) is available here: http://pastebin.com/aYmyA7N2. Thank you in advance.
There are a few errors in your code... widgets like Hidden can only hold string values, and they can only return string values when you retrieve them.
One possible and easy way to convert arrays to strings (and back) is to use a combination of join() and split(). Here is the modified code (relevant part only) that works:
// Storing checked rows
function check(e) {
  var checkedArray = [];
  var data = sh.getRange(1, 1, lastrow, lastcol).getValues();
  for (var n = 0; n < data.length; ++n) {
    if (e.parameter['check' + n] == 'true') {
      checkedArray.push(data[n].join(',')); // convert data row array to a string with comma separator
    }
  }
  var hidden = app.getElementById('hidden');
  hidden.setValue(checkedArray.join('|')); // convert the array to a string with | separator
  return app;
}

function click(e) {
  var hiddenVal = e.parameter.hidden.split('|'); // e.parameter.hidden is a string; split it back into an array of strings, each of which must be split again to get the original array of arrays
  var d = new Date();
  var time = d.toLocaleTimeString();
  var table = [];
  for (var n in hiddenVal) {
    table.push(hiddenVal[n].split(',')); // reconstruction of the 2D array
  }
  DocumentApp.create('doc ' + time).getBody().appendTable(table); // the table is in the document
}
Full code available here
EDIT: suggestion: if you put your headers in your spreadsheet, you could retrieve them in your final table quite easily like this:
function check(e) {
  var checkedArray = [];
  var data = sh.getRange(1, 1, lastrow, lastcol).getValues();
  checkedArray.push(data[0].join(',')); // if you have headers in your spreadsheet, they are added by default
  for (var n = 0; n < data.length; ++n) {
    if (e.parameter['check' + n] == 'true') {
      checkedArray.push(data[n].join(','));
    }
  }
  // ... store checkedArray in the Hidden widget and return app, as in the version above
}
You could also use data[0] in the doGet function to build the header of your UI; I think this would make your code easier to maintain without hardcoding the data... but this is only a suggestion ;-)
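A minimal sketch of that suggestion, assuming doGet builds the FlexTable from the same sheet and keeps the first column for the checkboxes (the variable names here are illustrative, not taken from the pastebin):

// Sketch only: use the first spreadsheet row (data[0]) as the FlexTable header
// instead of hardcoding the labels in doGet.
function doGet() {
  var app = UiApp.createApplication();
  var sh = SpreadsheetApp.getActiveSpreadsheet().getActiveSheet();
  var data = sh.getDataRange().getValues();
  var flexTable = app.createFlexTable().setId('flexTable');
  for (var c = 0; c < data[0].length; c++) {
    // column 0 is left free for the checkboxes, so headers start at column 1
    flexTable.setText(0, c + 1, String(data[0][c]));
  }
  app.add(flexTable);
  return app;
}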