Run long-running executable with output in gulp [duplicate] - gulp

I have this simple script:
var exec = require('child_process').exec;
exec('coffee -cw my_file.coffee', function(error, stdout, stderr) {
  console.log(stdout);
});
where I simply execute a command to compile a CoffeeScript file. But stdout never gets displayed in the console, because the command never ends (because of the -w option of coffee).
If I execute the command directly from the console I get messages like this:
18:05:59 - compiled my_file.coffee
My question is: is it possible to display these messages with the Node.js exec? If so, how?
Thanks

Don't use exec. Use spawn, which is an EventEmitter object. Then you can listen to stdout/stderr events (spawn.stdout.on('data', callback...)) as they happen.
From the Node.js documentation:
var spawn = require('child_process').spawn,
    ls = spawn('ls', ['-lh', '/usr']);

ls.stdout.on('data', function (data) {
  console.log('stdout: ' + data.toString());
});

ls.stderr.on('data', function (data) {
  console.log('stderr: ' + data.toString());
});

ls.on('exit', function (code) {
  console.log('child process exited with code ' + code.toString());
});
exec buffers the output and usually returns it when the command has finished executing.
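As an illustration of that buffering (my example, not from the original answer): the callback fires once, after the command exits, with the whole output in a single string; nothing is printed while the command is still running.
var exec = require('child_process').exec;

exec('ls -lh /usr', function (error, stdout, stderr) {
  // runs only once `ls` has exited; stdout holds the entire buffered output
  console.log('finished, got ' + stdout.length + ' characters of stdout');
});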

exec will also return a ChildProcess object that is an EventEmitter.
var exec = require('child_process').exec;
var coffeeProcess = exec('coffee -cw my_file.coffee');

coffeeProcess.stdout.on('data', function(data) {
  console.log(data);
});
OR pipe the child process's stdout to the main stdout.
coffeeProcess.stdout.pipe(process.stdout);
OR inherit stdio using spawn
spawn('coffee', ['-cw', 'my_file.coffee'], { stdio: 'inherit' });

There are already several answers, but none of them mentions the best (and easiest) way to do this: using spawn with the { stdio: 'inherit' } option. It seems to produce the most accurate output, for example when displaying progress information from a git clone.
Simply do this:
var spawn = require('child_process').spawn;
spawn('coffee', ['-cw', 'my_file.coffee'], { stdio: 'inherit' });
Credit to @MorganTouvereyQuilling for pointing this out in this comment.

Inspired by Nathanael Smith's answer and Eric Freese's comment, it could be as simple as:
var exec = require('child_process').exec;
exec('coffee -cw my_file.coffee').stdout.pipe(process.stdout);
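One possible extension (my addition, not part of the original answer): also pipe stderr so compile errors show up too.
var exec = require('child_process').exec;
var child = exec('coffee -cw my_file.coffee');

child.stdout.pipe(process.stdout); // compilation messages
child.stderr.pipe(process.stderr); // compile errors, if any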

I'd just like to add one small issue with outputting the buffer strings from a spawned process with console.log(): it adds newlines, which can spread your spawned process output over additional lines. If you output stdout or stderr with process.stdout.write() instead of console.log(), you'll get the console output from the spawned process 'as is'.
I saw that solution here:
Node.js: printing to console without a trailing newline?
Hope that helps someone using the solution above (which is a great one for live output, even if it is from the documentation).
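A minimal sketch of that point (mine, reusing the ls example from the accepted answer): write the chunks as-is so the child's own line breaks are preserved.
var spawn = require('child_process').spawn;
var ls = spawn('ls', ['-lh', '/usr']);

ls.stdout.on('data', function (data) {
  process.stdout.write(data); // no extra newline added per chunk
});
ls.stderr.on('data', function (data) {
  process.stderr.write(data);
});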

I have found it helpful to add a custom exec script to my utilities that does this.
utilities.js
const { exec } = require('child_process')

module.exports.exec = (command) => {
  const process = exec(command)
  process.stdout.on('data', (data) => {
    console.log('stdout: ' + data.toString())
  })
  process.stderr.on('data', (data) => {
    console.log('stderr: ' + data.toString())
  })
  process.on('exit', (code) => {
    console.log('child process exited with code ' + code.toString())
  })
}
app.js
const { exec } = require('./utilities.js')
exec('coffee -cw my_file.coffee')

After reviewing all the other answers, I ended up with this:
function oldSchoolMakeBuild(cb) {
  var makeProcess = exec('make -C ./oldSchoolMakeBuild',
    function (error, stdout, stderr) {
      stderr && console.error(stderr);
      cb(error);
    });
  makeProcess.stdout.on('data', function(data) {
    process.stdout.write('oldSchoolMakeBuild: ' + data);
  });
}
Sometimes data will contain multiple lines, so the oldSchoolMakeBuild header will appear only once for several lines of output. But this didn't bother me enough to change it.
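If it does bother you, one hedged variant (my sketch, reusing makeProcess from the snippet above) is to split each chunk on newlines and prefix every line individually:
makeProcess.stdout.on('data', function (data) {
  data.toString().split('\n').filter(Boolean).forEach(function (line) {
    process.stdout.write('oldSchoolMakeBuild: ' + line + '\n');
  });
});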

child_process.spawn returns an object with stdout and stderr streams.
You can tap the stdout stream to read the data that the child process sends back to Node. stdout, being a stream, has the "data", "end", and other events that streams have. spawn is best used when you want the child process to return a large amount of data to Node - image processing, reading binary data, etc.
So you can solve your problem using child_process.spawn as shown below.
var spawn = require('child_process').spawn,
    child = spawn('coffee', ['-cw', 'my_file.coffee']); // command and arguments passed separately

child.stdout.on('data', function (data) {
  console.log('stdout: ' + data.toString());
});

child.stderr.on('data', function (data) {
  console.log('stderr: ' + data.toString());
});

child.on('exit', function (code) {
  console.log('code ' + code.toString());
});

Here is an async helper function, written in TypeScript, that seems to do the trick for me. I guess it will not work for long-lived processes, but it might still be handy for someone.
import * as child_process from "child_process";

private async spawn(command: string, args: string[]): Promise<{code: number | null, result: string}> {
  return new Promise((resolve, reject) => {
    const spawn = child_process.spawn(command, args)
    let result: string
    spawn.stdout.on('data', (data: any) => {
      if (result) {
        reject(Error('Helper function does not work for long-lived processes'))
      }
      result = data.toString()
    })
    spawn.stderr.on('data', (error: any) => {
      reject(Error(error.toString()))
    })
    spawn.on('exit', code => {
      resolve({code, result})
    })
  })
}

Related

ReferenceError: data is not defined when writing a piped gulp script

gulp.task('default', function(done) {
  inquirer.prompt([{
    type: `input`,
    message: `Enter the path`,
    default: `./admin/admin.json`,
    name: `path`
  }]).then(function(answers) {
    console.log(answers.path);
    console.log('answers');
    mydefaultTaskTwo(null, answers.path).pipe(pipedFunction());
    done();
  })
});

function mydefaultTaskTwo(cb, path) {
  let data = '';
  try {
    data = fs.readFileSync(path, 'utf-8');
  } catch (e) {
    console.log(`Error: ${e}`);
  }
  return data;
}

function pipedFunction() {
  let object = JSON.parse(data);
  object['main'] = 'admin';
  data = JSON.stringify(object);
  const readable = Readable.from(data)
  return readable;
}
I understand that src returns a stream, and pipe takes that stream and returns a stream, but how do you feed the stream into the pipedFunction called inside of pipe? I am unsure how it works. I get the following error:
ReferenceError: data is not defined.
Is there something I am misunderstanding about gulp scripts?
Basically, you define data as a function-scoped variable and try to reach it from a different scope, where it's undefined. So you need to make use of the fact that data is returned, and pass it along, like:
var data = mydefaultTaskTwo(null, answers.path);
data.pipe(pipedFunction(data));
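For completeness, a minimal sketch of that idea (my reading of the answer, not the answerer's exact code): since mydefaultTaskTwo returns a string, pipedFunction can accept it as a parameter and build the readable stream itself.
const { Readable } = require('stream');

function pipedFunction(data) {
  const object = JSON.parse(data); // `data` now arrives as an argument
  object['main'] = 'admin';
  return Readable.from(JSON.stringify(object));
}

const data = mydefaultTaskTwo(null, answers.path);
pipedFunction(data).pipe(process.stdout); // or whatever destination stream you need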

What is the right way to use sharp with gulp?

I am trying to convert a bunch of SVGs to PNGs.
But due to the sad state of gulp-svg2png and gulp-sharp, I'm evaluating other options; sharp looks promising enough.
Versions:
$ gulp --version
CLI version: 2.2.0
Local version: 4.0.2
$ node
Welcome to Node.js v13.6.0.
Type ".help" for more information.
> require('./package.json').dependencies.sharp
'^0.24.0'
Unfortunately the following
gulp.task('svg-convert', function() {
  var sharp = require('sharp')
  gulp.src(config.svg2png)
    .pipe(sharp().png())
    .pipe(gulp.dest('./dist/images'))
});
sends this error:
TypeError [ERR_INVALID_ARG_TYPE] [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer. Received an instance of File
at validChunk (_stream_writable.js:285:10)
at Sharp.Writable.write (_stream_writable.js:324:23)
...
(stacktrace continues)
Has someone solved this?
Or is there a better way to deal with this particular need?
Edited after @AllainLG's answer.
These are my two takes.
Unfortunately both are failing.
gulp.task('svg2png', function() {
  var sharp = require('sharp');
  return gulp.src(c.svg2png)
    .pipe(plug.each(function(content, file, callback) {
      var newContent = sharp(content).png(); // here content is a buffer containing the image, newContent should be too?
      // var newContent = sharp(content).png().toBuffer(); // this sends a Promise and fails
      return callback(null, newContent);
    }), 'buffer')
    .pipe(plug.rename(function(path) {
      return path.extname = ".png";
    }))
    .pipe(plug.imagemin())
    .pipe(gulp.dest('./dist/images'));
});
This pipes nothing to the destination: nothing in ./dist/images.
This second attempt uses gulp-tap.
gulp.task('svg2png2', function() {
  var sharp = require('sharp');
  return gulp.src(c.svg2png)
    .pipe(plug.tap(function(file) {
      return file.contents = sharp(file.contents).png();
    }))
    .pipe(plug.rename(function(path) {
      return path.extname = ".png";
    }))
    // .pipe(plug.imagemin()) // sends "Streaming not supported"
    .pipe(gulp.dest('./dist/images'));
});
This generates empty files in the destination (with the right names).
The following works, with the minor bonus of a ~100ms time improvement over gulp-svg2png (average over 10 complex SVGs: ~430ms vs ~540ms).
It uses through2.
I'll accept this answer in around a month unless someone has a better answer.
gulp.task('svg2png', function() {
  var sharp, through2;
  sharp = require('sharp');
  through2 = require('through2');
  return gulp.src(c.svg2png)
    .pipe(through2.obj(function(file, _, cb) {
      return sharp(file.contents).png().toBuffer().then(function(buffer) {
        file.contents = buffer;
        return cb(null, file);
      }).catch(function(err) {
        console.error(err);
        return cb(null, file);
      });
    }))
    .pipe(plug.rename(function(path) {
      return path.extname = ".png";
    }))
    .pipe(plug.imagemin())
    .pipe(gulp.dest('./dist/images'));
});
sharp's first parameter needs to be a string or a Buffer; gulp won't pass these directly.
You can use gulp-each to call sharp on each file (or write your own simple method), as in the sketch below.
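A hedged sketch of that suggestion (assuming gulp-each's (content, file, callback) signature and its second 'buffer' argument, as in the question's first attempt; the source glob is a placeholder): wait for sharp's promise before handing the PNG buffer back.
var gulp = require('gulp');
var each = require('gulp-each');
var rename = require('gulp-rename');
var sharp = require('sharp');

gulp.task('svg2png', function() {
  return gulp.src('src/images/**/*.svg') // placeholder glob
    .pipe(each(function(content, file, callback) {
      sharp(content)
        .png()
        .toBuffer()
        .then(function(buffer) { callback(null, buffer); }) // hand the PNG buffer back to gulp
        .catch(function(err) { callback(err); });
    }, 'buffer'))
    .pipe(rename({ extname: '.png' }))
    .pipe(gulp.dest('./dist/images'));
});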

AWS S3 bucket image upload in Angular 6 having a problem with the return

I am using this approach to upload images to an AWS S3 bucket:
https://grokonez.com/aws/angular-4-amazon-s3-example-how-to-upload-file-to-s3-bucket
This works fine as an individual task, but I rely on the result, which arrives a bit later because of the async behavior. I would like the next task to be executed only after the upload is confirmed.
upload() {
  let file: any;
  // let urltype = '';
  let filename = '';
  // let b: boolean;
  for (let i = 0; i < this.urls.length; i++) {
    file = this.selectedFiles[i];
    // urltype = this.urltype[i];
    filename = file.name;
    const k = uuid() + '.' + filename.substr((filename.lastIndexOf('.') + 1));
    this.uploadservice.uploadfile(file, k);
    console.log('done');
    // console.log('file: ' + file + ' : ' + filename);
    // let x = this.userservice.PostImage('test', file);
    // console.log('value of ' + x);
  }
  // return b;
}
fileupload service:
bucket.upload(params, function (err, data) {
  if (err) {
    console.log('There was an error uploading your file: ', err);
    return false;
  }
  console.log('Successfully uploaded file.', data);
  return true;
}).promise();
}
Here, 'done' is logged before the file upload has finished.
I think you should check out a tutorial on asynchronous programming, play around with a couple of examples using simple timeouts to get the hang of it, and then proceed to more complex things like S3 and AWS.
Here is how I suggest you start your journey:
1) Learn the basic concepts of asynchronous programming using pure JS
https://eloquentjavascript.net/11_async.html
2) Play around with your own examples using callbacks and timeouts
3) Replace the callbacks with Promises
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises
4) Do it the "angular" way with rxjs Observables (similar to JS Observable)
http://reactivex.io/rxjs/class/es6/Observable.js~Observable.html
PS: To be more concrete: your code fails because the following line is executed in an asynchronous manner. The code calls your uploadfile function and immediately continues executing without waiting:
this.uploadservice.uploadfile(file, k);
Once you follow all the points I described above, you will be able to do something like this (using a Promise):
this.uploadservice.uploadfile(file, k)
  .then(result => {
    console.log('Upload finished');
  })
  .catch(error => {
    console.log('Something went wrong');
  });
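For that to work, uploadfile itself has to return a promise. A minimal sketch with the AWS SDK v2 (the bucket name and function names are placeholders, not the asker's real values):
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

function uploadfile(file, key) {
  const params = { Bucket: 'my-bucket', Key: key, Body: file }; // 'my-bucket' is a placeholder
  // .promise() turns the SDK request into a Promise the caller can await or chain
  return s3.upload(params).promise();
}

// The caller then waits for each upload before logging 'done'
async function upload(files) {
  for (const file of files) {
    await uploadfile(file, file.name);
    console.log('done'); // now runs only after the upload completes
  }
}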

Calling stored procedures within fast-csv asynchronously

I am writing a backend API in Node.js and need functionality for users to upload files with data and then call stored procedures that insert the data into MySQL. I'm thinking of using fast-csv as the parser, but I am struggling with how to set up the call to the stored procedure inside the CSV stream. The idea is something like this:
var fs = require("fs");
var csv = require("fast-csv");
var stream1 = fs.createReadStream("files/testCsvFile.csv");

csv
  .fromStream(stream1, { headers: true })
  .on("data", function(data) {
    //CALL TO SP with params from "data"//
    numlines++;
  })
  .on("end", function() {
    console.log("done");
  });
In other parts of the application I have set up routes as follows:
auth.post("/verified", async (req, res) => {
var user = req.session.passwordless;
if (user) {
const rawCredentials = await admin.raw(getUserRoleCredentials(user));
const { user_end, role } = await normalizeCredentials(rawCredentials);
const user_data = { user_end, role };
res.send(user_data);
} else {
res.sendStatus(401);
}
});
That is, routes are written in async/await style, with queries (all of them stored procedure calls) defined as Promises. I would like to follow this pattern in the upload/parse CSV/call-SP-for-every-line function.
This is doing the job for me - can you please describe how to achieve that with your framework? I believe it can be done somehow, I just need to configure it correctly.
//use fast-csv to stream data from a file
csv
  .fromPath(form.FileName, { headers: true })
  .on("data", async data => {
    const query = await queryBuilder({
      schema,
      routine,
      parameters,
      request
    }); //here we prepare query for calling the SP with parameters from data
    winston.info(query + JSON.stringify(data));
    const rawResponse = await session.raw(query); //here the query gets executed
    fileRows.push(data); // push each row - for testing only
  })
  .on("end", function() {
    console.log(fileRows);
    fs.unlinkSync(form.FileName); // remove temp file
    //process "fileRows" and respond
    res.end(JSON.stringify(fileRows)) // - for testing
  });
As mentioned in the comment, I made my scramjet framework to handle such use cases with ease... Please correct me if I understood it wrong, but I understand you want to call the two await lines for every CSV row in the test.
If so, your code would look like this (updated to match your comment/answer):
var fs = require("fs");
var csv = require("fast-csv");
var stream1 = fs.createReadStream("files/testCsvFile.csv");
var {DataStream} = require("scramjet");

DataStream
  // the following line will convert any stream to scramjet.DataStream
  .from(csv.fromStream(stream1, { headers: true }))
  // the next line controls how many simultaneous operations are made
  // I assumed 16, but if you're fine with 40 or you want 1 - go for it.
  .setOptions({maxParallel: 16})
  // the next line will call your async function and wait until it's completed
  // and control the back-pressure of the stream
  .do(async (data) => {
    const query = await queryBuilder({
      schema,
      routine,
      parameters,
      request
    }); //here we prepare query for calling the SP with parameters from data
    winston.info(query + JSON.stringify(data));
    const rawResponse = await session.raw(query); //here the query gets executed
    return data; // push each row - for testing only
  })
  // next line will run the stream until end and return a promise
  .toArray()
  .then(fileRows => {
    console.log(fileRows);
    fs.unlinkSync(form.FileName); // remove temp file
    //process "fileRows" and respond
    res.end(JSON.stringify(fileRows)); // - for testing
  })
  .catch(e => {
    res.writeHead(500); // some error handling
    res.end(e.message);
  });
// you may want to put an await statement before this chain, or call .then() to check
// for errors, which I assume is your use case.
To answer your comment question: if you were to use an async function in the on("data") event, you would need to create an array of promises and await Promise.all of that array on stream end. The handler itself still runs synchronously, so an async function in the event handler won't do it on its own.
In scramjet this happens under the hood, so you can use the function.
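For reference, a minimal sketch of that Promise.all alternative (my sketch, using the fromStream API from the question; processRow stands in for the queryBuilder/session.raw logic):
var fs = require("fs");
var csv = require("fast-csv");

var stream1 = fs.createReadStream("files/testCsvFile.csv");
var pending = [];

csv
  .fromStream(stream1, { headers: true })
  .on("data", function (data) {
    pending.push(processRow(data)); // push the promise synchronously, don't await here
  })
  .on("end", function () {
    // wait for every row's stored-procedure call before responding
    // note: unlike scramjet, this gives no back-pressure or concurrency control
    Promise.all(pending).then(function (results) {
      console.log(results.length + " rows processed");
    });
  });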

Get random wiki page from cloud functions

I tried to get a random Wikipedia page over their API via Google Cloud Functions. The Wikipedia API works fine. This is my request:
https://de.wikipedia.org/w/api.php?action=query&format=json&generator=random
For testing you can change the format to jsonfm to see the result in the browser.
But it seems that my function gets terminated even before the request has completed successfully. If I try to parse the data (or even just log it), I get a
SyntaxError: Unexpected end of json
The log looks (for example) like this (no, I haven't truncated it myself):
DATA: ue||"},"query":{"pages":{"2855038":{"pageid":2855038,"ns":0,"title":"Thomas Fischer
Of course, that is not valid JSON and can't be parsed. Anyway, this is my function:
exports.randomWikiPage = function getRandomWikiPage (req, res) {
  const httpsOptions = {
    host: "de.wikipedia.org",
    path: "/w/api.php?action=query&format=json&generator=random"
  };
  const https = require('https');
  https.request(httpsOptions, function(httpsRes) {
    console.log('STATUS: ' + httpsRes.statusCode)
    console.log('HEADERS: ' + JSON.stringify(httpsRes.headers))
    httpsRes.setEncoding('utf8')
    httpsRes.on('data', function (data) {
      console.log("DATA: " + data)
      const wikiResponse = JSON.parse(data);
      const title = wikiResponse.query.title
      res.status(200).json({"title": title})
    });
  }).end();
};
I've already tried to return something here, like that video explained. But as I look at the Node docs, https.request doesn't return a Promise, so returning it is wrong. I've also tried to extract the on('data', callback) into its own function so that I can return the callback, but I haven't had success with that either.
How does my function have to look so that it returns my expected
{"title": "A random Wikipedia Page title"}
?
I believe your JSON comes through as a stream in chunks, and you're attempting to parse only the first data chunk that comes back. Try something like:
https.request(httpsOptions, function(httpsRes) {
  console.log('STATUS: ' + httpsRes.statusCode)
  console.log('HEADERS: ' + JSON.stringify(httpsRes.headers))
  httpsRes.setEncoding('utf8')
  let wikiResponseData = '';
  httpsRes.on('data', function (data) {
    wikiResponseData += data;
  });
  httpsRes.on('end', function() {
    const wikiResponse = JSON.parse(wikiResponseData)
    const title = wikiResponse.query.title
    res.status(200).json({"title": title})
  })
}).end();