Gulp task - unable to set variable value - gulp

I'm using git-rev, gulp-header and run-sequence, and I'm trying to automatically add some info — including the git commit number — to the app.js file during the build process.
here's the code I have so far:
var runSequence = require('gulp-run-sequence');
var git = require('git-rev');
var header = require('gulp-header');
var pkg = require('./info.json');
var paths = {addHeader: ['./www/js/app.js'], ...}
var commit, timestamp;
function getGitInfo() {
git.short(function (str) {
commit = str;
console.log(str);
});
};
var banner = ['"commit":"' + commit + '",',
'"timestamp":"' + timestamp + '",',
'"appVersion":"<%= pkg.appVersion %>",',
'"appReleaseDate":"<%= pkg.appReleaseDate %>"',
'};\n',
''].join('\n');
gulp.task('get-git-info', getGitInfo());
gulp.task('add-header', function () {
return gulp.src(paths.addHeader)
.pipe(header(banner, {pkg: pkg}))
.pipe(gulp.dest('./www-dev/js/'))
});
gulp.task('build', function (){
runSequence('get-git-info','add-header');
})
the console result is right, I have the commit number, but in app.js all I get is undefined:
aboutPage={
"appVersion":"5.0.0",
"appReleaseDate":"10/02/2016",
"commit":"undefined",
"timestamp":"undefined"
};
I am talking about commit, and NOT timestamp. I'm going to worry about timestamp later.
Any idea what am I doing wrong here?
thanks

There are a few things wrong with your Gulpfile:
Your banner variable is initialized before any of your tasks are even defined, let alone have been executed. That means that commit is still undefined when you initialize banner.
gulp.task expects a function as the task body. However the task body for get-git-info in your Gulpfile is getGitInfo(). That means you execute the getGitInfo function and assign the return value of that function call as the task body of get-git-info. Which in your case is undefined.
Even if you had assigned the getGitInfo function itself as the task body (instead of its return value), it still wouldn't work because git.short() is asynchronous. That means that get-git-info returns and add-header is run before your callback function to git.short() is called with the commit-id.
Here's a solution that addresses all three of these problems:
function banner() {
return [
'"commit":"' + commit + '",',
'"timestamp":"' + timestamp + '",',
'"appVersion":"<%= pkg.appVersion %>",',
'"appReleaseDate":"<%= pkg.appReleaseDate %>"',
'};\n',
''
].join('\n');
}
gulp.task('get-git-info', function(done) {
git.short(function (str) {
commit = str;
console.log(str);
done();
});
});
gulp.task('add-header', function () {
return gulp.src(paths.addHeader)
.pipe(header(banner(), {pkg: pkg}))
.pipe(gulp.dest('./www-dev/js/'))
});
banner is now a function so the commit variable isn't accessed until it has been initialized.
getGitInfo is gone. Instead an anonymous function is used as the task body for get-git-info, so it's dead obvious that we are in fact assigning a function here.
The anonymous function accepts a callback done which is called once the commit-id is available. This signals to gulp that get-git-info is finished and that runSequence can proceed with the add-header task.

Related

only last callback is being called in gulp file

In gulpfile, I've 3 tasks, when I run the following code, it only executes the callback of the last task. I want that if I run the gulp command, after completion of clean task, it should execute callback of copy:db & default task.
Gulp.task('clean', function () {
console.log('Clean');
return Gulp.src("./dbSchema/*")
.pipe(VinylPaths(Del));
})
Gulp.task('copy:db', Gulp.series("clean"), function () {
console.log('Copy DB');
return Gulp.src("./db/*")
.pipe(Gulp.dest("./dbSchema"));
})
Gulp.task('default', Gulp.series("copy:db"), function () {
console.log('defaulp');
return TypeScriptProject.src()
.pipe(TypeScriptProject())
.js.pipe(Gulp.dest('dist'));
});
When I run the command gulp, it shows the following log.
[12:46:37] Starting 'default'...
[12:46:37] Starting 'copy:db'...
[12:46:37] Starting 'clean'...
Clean
[12:46:37] Finished 'clean' after 26 ms
[12:46:37] Finished 'copy:db' after 28 ms
[12:46:37] Finished 'default' after 31 ms
Can anyone tell me where am I going wrong?
To get your code working how you’ve described, the callback functions need to be passed as a parameter to .series(). Eg:
Gulp.task('clean', function () {
console.log('Clean');
return Gulp.src("./dbSchema/*")
.pipe(VinylPaths(Del));
})
Gulp.task('copy:db', Gulp.series('clean', function () {
console.log('Copy DB');
return Gulp.src("./db/*")
.pipe(Gulp.dest("./dbSchema"));
}))
Gulp.task('default', Gulp.series('copy:db', function () {
console.log('defaulp');
return TypeScriptProject.src()
.pipe(TypeScriptProject())
.js.pipe(Gulp.dest('dist'));
}));
IMHO, it would be simpler to have three totally separate tasks:
Gulp.task('clean', function () {
console.log('Clean');
return Gulp.src("./dbSchema/*")
.pipe(VinylPaths(Del));
});
Gulp.task('copy:db', function () {
console.log('Copy DB');
return Gulp.src("./db/*")
.pipe(Gulp.dest("./dbSchema"));
});
Gulp.task('default', function () {
console.log('defaulp');
return TypeScriptProject.src()
.pipe(TypeScriptProject())
.js.pipe(Gulp.dest('dist'));
});
and then call them with either:
Gulp.task('default', Gulp.series('clean', 'copy:db', 'js'));
or
Gulp.task('default', Gulp.series('clean', Gulp.parallel('copy:db', 'js')));
Hope that helps :)
Additional Notes:
The naming convention for gulp/vars is normally camelCase, eg: gulp and typeScriptProject not Gulp or TypeScriptProject.
You can totally remove the need to ever write gulp. by using: var {gulp, task, src, dest, watch, series, parallel} = require('gulp');
Rather than defining your tasks directly, you can make your code easier to read by using CommonJS exports module notation to declare tasks.
Makes life a little easier if you are consistent with quotes, rather than mixing singles and doubles. Both allow globbing.
Following Gulp’s own documentation is perhaps the place to start, their sample code on github has some great examples of setting up a basic gulpfile.
If you wrap all that up you get this:
/*
* Example of requires with gulp methods also requiring gulp.
*/
var {
gulp,
dest,
series,
parallel,
src,
task,
watch
} = require('gulp'),
vinylPaths = require('vinyl-paths'), // may not be required, see note in clean func.
del = require('del'),
ts = require('gulp-typescript');
/*
* Added a basic TypeScript Project so the example is complete and could run.
*/
var typeScriptProject = ts.createProject({
declaration: true
});
/*
* Your tasks converted to plain/standard js functions.
*/
function clean () {
return src('dbSchema/*')
.pipe(vinylPaths(del));
// Looking at your example code the vinylPaths is redundant,
// as long as you’re using del ^2.0 it will return its promise,
// so you could replace the above with:
return del([ 'dbSchema' ]);
}
function copyDb () {
return src('db/*')
.pipe(dest('dbSchema'));
}
function scripts () {
// example src path
return src('lib/*.ts')
.pipe(typeScriptProject())
.pipe(dest('dist'));
}
/*
* By defining all the tasks separately it makes it really clear how the tasks will run.
*/
var build = gulp.series (
clean,
gulp.parallel (
copyDb,
scripts
)
);
/*
* Example of using `exports` module notation to declare tasks.
*/
exports.clean = clean;
exports.copyDb = copyDb;
exports.scripts = scripts;
exports.build = build;
/*
* Good practise to define the default task as a reference to another task.
*/
exports.default = build;

execute tasks synchronously in gulp

I have read online that 'run-sequence' will make sure all specified tasks will run synchronously. For some reason this is not true in my case. Am I missing something?
'convertSassToCss' is the task that does not work as intended
If I would run tasks 'cleanAllCss' and 'convertSassToCss' seperatelly, it would work.
The idea here is to first remove all css files from directory, then convert all sass files into css and place the into the cleaned dir
/// <binding BeforeBuild='clean, min:css' Clean='clean' />
"use strict";
var gulp = require("gulp"),
rimraf = require("rimraf"),
concat = require("gulp-concat"),
cssmin = require("gulp-cssmin"),
uglify = require("gulp-uglify"),
sass = require('gulp-sass'),
rename = require('gulp-rename'),
del = require('del'),
runSequence = require('run-sequence');
var paths = {
webroot: "./wwwroot/"
};
paths.cssPath = paths.webroot + "css/*.css";
paths.cssOutputPath = paths.webroot + "css";
//sass
paths.sassPath = paths.webroot + "sass/**/*.scss";
paths.sassOutputPath = paths.webroot + "./css/file";
gulp.task("cleanAllCss", function (cb) {
console.log("2 -- Removing all CSS files");
del([paths.cssOutputPath + "/*.css"], cb);
console.log("2 -- DONE - Removed all css files");
});
gulp.task("convertSassToCss", function (cb) {
console.log("3 -- Converting all SASS files into corresponding CSS");
gulp.src(paths.sassPath)
.pipe(sass())
.pipe(gulp.dest(paths.cssOutputPath));
console.log("3 -- DONE - Converting all SASS files into corresponding CSS");
});
//not working, should run in sequence
gulp.task("convertAllSassIntoCssMin", function (callback) {
console.log("1 -- Converting all SASS files into corresponding min CSS files")
runSequence('cleanAllCss', 'convertSassToCss', callback);
console.log("1 -- DONE - Converting all SASS files into corresponding min CSS files")
});
I cannot speak to run-sequence as I haven't used it before.
However, you can run tasks in sequence by using Gulp's task dependency feature, where a task will NOT run until it's dependencies have finished running.
Your new tasks signatures
cleanAllCss stays the same:
gulp.task("cleanAllCss", function (cb) { ... }
convertSassToCss changes to:
gulp.task("convertSassToCss", ['cleanAllCss'], function (cb) { ... }
convertAllSassIntoCssMin changes to:
gulp.task("convertAllSassIntoCssMin", ['convertSassToCss'], function (cb) { ... }
This ensures that convertAllSassIntoCssMin won't run until convertSassToCss has finished which in turn won't run until cleanAllCss has finished.
Refer to gulp deps:
deps
Type: Array
An array of tasks to be executed and completed before your task will
run.
gulp.task('mytask', ['array', 'of', 'task', 'names'], function() {
  // Do stuff
});
Note: Are your tasks running before the dependencies are complete? Make sure your dependency tasks are correctly using the async run hints: take in a callback or return a promise or event stream.
You can also omit the function if you only want to run a bundle of
dependency tasks:
gulp.task('build', ['array', 'of', 'task', 'names']);
Note: The tasks will run in parallel (all at once), so don't assume that the tasks will start/finish in order.
The problem is in the cleanAllCss task. The second parameter accepted by del is options, not the callback which you're trying to pass. The callback is never executed. Try running it manually when the removal is finished.
gulp.task("cleanAllCss", function (cb) {
console.log("2 -- Removing all CSS files");
del([paths.cssOutputPath + "/*.css"]).then(paths => {
console.log("2 -- DONE - Removed all css files");
cb();
});
});

JSON array undefined & empty in promise Bluebird

I am using Promise bluebird to process a json array objects from file. The problem arises if I want to store data in a json array (called list) and return this in the final process.
The list is empty/undefined after the return of list, even in the final process. Running the code, I always have at least one value that is not false, which triggers the adding/push of the JSON into the list.
Can you help me with this issue? Below you will find my code.
Thanks in advance !!!
var Promise = require('bluebird');
var join = Promise.join;
var fs = Promise.promisifyAll(require("fs"));
fs.readdirAsync(dir).map(function (filename) {
return fs.readFileAsync(dir + "/" + filename, "utf8");
}).then(function(result){
var list=[];
result.map(function(row, index){
Promise.coroutine(function*() {
update(row, index).then(function(value){
if (value!=false){
var trade_update = new updated_Item(row.ID, row.Quantity, row.Price, row.Remark);
list.push(trade_update);
console.log(JSON.stringify(list)); <-- This works. It gives me data
}
return list;
})
})();
});
console.log('list: ' + JSON.stringify(list)); <-- output: list:[]
return list;
}).finally(function(result){
console.log('Final outcome: '+ ' ' + JSON.stringify(result)); <-- output: Final outcome: undefined
})
With the help of Samuel my code is now:
var Promise = require('bluebird');
var join = Promise.join;
var fs = Promise.promisifyAll(require("fs"));
function updateOrder(done){
fs.readdirAsync(dir).map(function (filename) {
return fs.readFileAsync(dir + "/" + filename, "utf8");
}).then(function(result){
var list=[];
result.map(function(row, index){
Promise.coroutine(function*() {
update(row, index).then(function(value){
if (value!=false){
var trade_update = new updated_Item(row.ID, row.Quantity, row.Price, row.Remark);
list.push(trade_update);
done(list);
}
})
})();
});
//done(list); <--if I put the done callback here, it will give me an empty list. I though once the result.map finished processing all the values give me the end result.
}
}
updateOrder(function(resultList){
console.log('List' + JSON.stringify(resultList));
})
This code give me whole resultList everytime the list has been updated (pushed) now.
I would to receive the resultList at the end once the function updateOrder is finished.
As noted in the comment, Promise.coroutine is asynchronous, which means that a result is not going to be returned straight after your code reaches it. This explains the phenomenon you are seeing, where the later print statements in your code suggest that list is undefined.
What you could do is wrap the entire code you got there in a function, then add a callback function as a parameter for the async functions to invoke when it has finished its duty, together returning the populated list back for later processing.
I have written a pseudo code for your case, unfortunately I couldn't test it on my IDE but the concept is there and it should work.
Consider my pseudo code:
var Promise = require('bluebird');
var join = Promise.join;
var fs = Promise.promisifyAll(require("fs"));
// Wrap everything you got into a function with a `done` parameter (callback fn)
function doStuff(done) {
fs.readdirAsync(dir).map(function (filename) {
return fs.readFileAsync(dir + "/" + filename, "utf8");
}).then(function(result){
var list=[];
result.map(function(row, index){
Promise.coroutine(function*() {
update(row, index).then(function(value){
if (value!=false){
var trade_update = new updated_Item(row.ID, row.Quantity, row.Price, row.Remark);
list.push(trade_update);
}
done(list);
})
})();
});
}).finally(function(result){
console.log('File read finish, but this doesnt mean I have finished doing everything!');
})
}
// call your function and provide a callback function for the async method to call
doStuff(function(resultList) {
console.log('list: ' + JSON.stringify(resultList));
// Continue processing the list data.
});

nightwatch.js return value from function outside a test

I have trouble moving certain code outside a test into a function that needs to return a value.
Here is part of my code for the test file
function getCountOfTopics(browser){
var count;
browser.getText('#sumTopics',
function(result){
count = result.value;
console.log(result.value);
}
);
return count;
};
module.exports = {
'Create article' : function(browser){
var noOfThreadsByInlineCode, noOfThreadsByFunction;
browser.getText('#sumTopics',
function(result){
noOfThreadsByInlineCode = result.value;
}
);
noOfThreadsByFunction = getCountOfTopics(browser);
browser.end();
}
}
Now, the variable noOfThreadsByInlineCode indeed gets the value in the DOM, but the variable noOfThreadsByFunction is undefined. The console does indeed print the correct value, so the function does get the correct value out of the DOM.
I would appreciate help in updating the function so that I do get the value returned.
The one-word answer is asynchronicity. The code doesn't wait for your callback to complete; that's how Node JS works.
If you are desperately in need of the content inside the callback, you can write this variable into a file and then access it anywhere you want inside your code. Here's a bit of a workaround:
Save something in a file:
var fs = require('fs');
iThrowACallBack(function(response){
fs.writeFile('youCanSaveData.txt', this.response, function(err) {
if (err) throw err;
console.log('Saved!');
browser.pause(5000);
});
});
Access it somewhere else:
iAccessThefile(){
response = fs.readFileSync('youCanSaveData.txt').toString('utf-8');
}
Hope it helps.
You return the variable 'count' outside the callback; that is why. You can take a look at this topic: How to return value from an asynchronous callback function?
function getCountOfTopics(browser){
var count;
browser.getText('#sumTopics',
function(result){
count = result.value;
console.log(result.value);
/// result.value is available in this callback.
}
);
What do you want to do with the 'value'?
PS: don't forget about custom commands — I think they are very helpful for this issue.

gulp: blanking output if Browserify fails

I'm using Browserify in my gulpfile.js like so:
browserify("src/main.js").bundle()
.on("end", function() {
console.log("compiled JavaScript via Browserify");
})
.on("error", function(err) {
console.warn("ERROR: Browserify failed", err);
})
.pipe(source("bundle.js"))
.pipe(gulp.dest("./dist"));
However, it's easy to miss compilation errors - so I'd prefer to blank dist/bundle.js (or insert an alert("ERROR");) instead of just reporting errors in the terminal.
My attempts to implement this failed due to what I assume is a race condition (pipe conclusion overwriting file operations in the "error" event handler), so I'd be grateful for some advice on how to do this properly.
If you think your issue is due to the asynchronicity of the code, you can use async or q to dictate the order in which your code executes (try async.waterfall, for example).
If you want to blank out a file on error, you can write a gulp stream handler that handles control structures that get generated in the stream and blanks the file out, otherwise just passes it through. Here is the code of the handler:
var through2 = require('through2');
var gutil = require('gulp-util');
var blank = function () {
var blank = false;
return through2.obj(
function (file, encoding, cb) {
if (!file.control && !blank) {
this.push(file);
} else if (file.control) {
blank = true;
} else if (file.path && blank) {
var newFile = new gutil.File({
base: file.base,
cwd: file.cwd,
path: file.path,
contents: new Buffer('')
});
this.push(newFile);
}
cb();
}, function (cb) {
cb();
});
};
Then you need to catch the error and generate a control structure. This can be done like this, in the on("error" handler, you place the following line of code:
this.push({"control": "failed"});
If you make the blank handler the last handler in the stream before the output, like this:
.pipe(blank())
.pipe(gulp.dest('./dist'));
Then you will have an empty file instead of the processed one. Of course you could modify this to write the error information to the file instead or do any number of things.
That having been said, why would you simply not want the processing to stop when you encounter this error? Any unhandled stream error should stop processing.