How do I process one src file multiple ways using gulp?

I want to use one source file to generate multiple destination files with different characteristics. Here's a watered-down version of what I want to do:
gulp.task('build', function() {
    var src = gulp.src('myfile.txt');
    for (var i = 0; i < 10; i++) {
        src
            .pipe(someplugin(i))
            .pipe(rename('myfile-' + i + '.txt'))
            .pipe(gulp.dest('./build'));
    }
});
Presumably, "someplugin" would change the file contents based on the index passed to it, so the code above would generate 10 files, each with slightly different content.
This doesn't work, and I didn't expect it to, but is there another way to achieve this?

I ended up creating a function that builds my tasks for me. Here's an example:
function taskBuilder(i) {
    return function() {
        return gulp.src('*.mustache')
            .pipe(someplugin(i))
            .pipe(rename('myfile-' + i + '.txt'))
            .pipe(gulp.dest('./build'));
    };
}

var tasks = [], taskName, i;
for (i = 0; i < 10; i++) {
    taskName = 'tasks-' + i;
    gulp.task(taskName, taskBuilder(i));
    tasks.push(taskName);
}
gulp.task('default', tasks);

Maybe you should check this:
How to save a stream into multiple destinations with Gulp.js?
If it doesn't answer your question, I believe a simple solution would be to have several tasks running one after the other. For example:
gulp.task('build', function() {
    return gulp.src('*.txt')
        .pipe(someplugin(1))
        .pipe(rename('myfile-1.txt'))
        .pipe(gulp.dest('./build'));
});
gulp.task('build2', ['build'], function() {
    return gulp.src('*.txt')
        .pipe(someplugin(2))
        .pipe(rename('myfile-2.txt'))
        .pipe(gulp.dest('./build'));
});
Running task 'build2' will run 'build' first, and then 'build2' once it completes. If you don't need 'build2' to wait for 'build' to finish, just remove the "return" from 'build' and the two will run at the same time.
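If you would rather keep everything in one task, a common pattern (a sketch, assuming the merge-stream package plus the rename plugin and someplugin from the question) is to open a fresh stream per iteration and merge them, so gulp can still track completion:
var merge = require('merge-stream');

gulp.task('build', function() {
    var streams = [];
    for (var i = 0; i < 10; i++) {
        // each iteration opens its own stream from the source file
        streams.push(gulp.src('myfile.txt')
            .pipe(someplugin(i))
            .pipe(rename('myfile-' + i + '.txt'))
            .pipe(gulp.dest('./build')));
    }
    // returning the merged stream lets gulp wait until all 10 files are written
    return merge(streams);
});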

Related

Correct way to signal stream complete

I've created a function that pipes a template file through gulp and outputs an individual file for each line of data. However, there are 35K lines of data to process, and although the code below signals "done" after about 5 minutes, the files are still being written. I can't start the next function until this one has completed, so I need to know when it has actually finished.
function createLocations(done) {
    if (GENERATE_LOCATIONS) {
        for (var i = 0; i < locations.length; i++) {
            var location = locations[i],
                fileName = 'location-'
                    + location.area.replace(/ +/g, '-').replace(/'+/g, '').replace(/&+/g, 'and').toLowerCase()
                    + '-'
                    + location.town.replace(/ +/g, '-').replace(/'+/g, '').replace(/`+/g, '').replace(/&+/g, 'and').toLowerCase();
            gulp.src('./templates/location-template.html')
                .pipe($.rename(fileName + ".html"))
                .pipe(gulp.dest('./tmp/'));
        }
    }
    done();
    console.log('Creating ' + locations.length + ' Files Please Wait...');
}
The function runs for a further 15 minutes after it has signalled "done". I'd appreciate any help detecting the actual completion of the function.
Many thanks!
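One way to detect actual completion (a sketch, assuming the merge-stream package, with a hypothetical slugify helper standing in for the chained replaces above): collect the stream from each iteration, merge them, and only call done when the merged stream ends.
var merge = require('merge-stream');

function slugify(s) {
    // condensed version of the chained replaces above (hypothetical helper)
    return s.replace(/ +/g, '-').replace(/['`]+/g, '').replace(/&+/g, 'and').toLowerCase();
}

function createLocations(done) {
    if (!GENERATE_LOCATIONS) {
        return done();
    }
    console.log('Creating ' + locations.length + ' files, please wait...');
    var streams = locations.map(function(location) {
        var fileName = 'location-' + slugify(location.area) + '-' + slugify(location.town);
        return gulp.src('./templates/location-template.html')
            .pipe($.rename(fileName + '.html'))
            .pipe(gulp.dest('./tmp/'));
    });
    merge(streams)
        .resume()           // drain the merged stream so 'end' can fire
        .on('end', done);   // fires only after every file has been written
}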

Execution order in NodeJS [duplicate]

I am running a loop of the following form:
var i;
var j = 10;
for (i = 0; i < j; i++) {
    asynchronousProcess(function callbackFunction() {
        alert(i);
    });
}
I am trying to display a series of alerts showing the numbers 0 through 9. The problem is that by the time the callback function is triggered, the loop has already gone through a few iterations and it displays a higher value of i. Any recommendations on how to fix this?
The for loop runs immediately to completion while all your asynchronous operations are started. When they complete some time in the future and call their callbacks, the value of your loop index variable i will be at its last value for all the callbacks.
This is because the for loop does not wait for an asynchronous operation to complete before continuing on to the next iteration of the loop and because the async callbacks are called some time in the future. Thus, the loop completes its iterations and THEN the callbacks get called when those async operations finish. As such, the loop index is "done" and sitting at its final value for all the callbacks.
To work around this, you have to uniquely save the loop index separately for each callback. In Javascript, the way to do that is to capture it in a function closure. That can either be done by creating an inline function closure specifically for this purpose (first example shown below), or you can create an external function that you pass the index to and let it maintain the index uniquely for you (second example shown below).
As of 2016, if you have a fully up-to-spec ES6 implementation of Javascript, you can also use let to define the for loop variable and it will be uniquely defined for each iteration of the for loop (third implementation below). But, note this is a late implementation feature in ES6 implementations so you have to make sure your execution environment supports that option.
Use .forEach() to iterate since it creates its own function closure
someArray.forEach(function(item, i) {
    asynchronousProcess(function() {
        console.log(i);
    });
});
Create Your Own Function Closure Using an IIFE
var j = 10;
for (var i = 0; i < j; i++) {
    (function(cntr) {
        // here the value of i was passed in as the argument cntr
        // and will be captured in this function closure so each
        // iteration of the loop can have its own value
        asynchronousProcess(function() {
            console.log(cntr);
        });
    })(i);
}
Create or Modify External Function and Pass it the Variable
If you can modify the asynchronousProcess() function, then you could just pass the value in there and have the asynchronousProcess() function pass cntr back to the callback like this:
var j = 10;
for (var i = 0; i < j; i++) {
    asynchronousProcess(i, function(cntr) {
        console.log(cntr);
    });
}
Use ES6 let
If you have a Javascript execution environment that fully supports ES6, you can use let in your for loop like this:
const j = 10;
for (let i = 0; i < j; i++) {
    asynchronousProcess(function() {
        console.log(i);
    });
}
let declared in a for loop declaration like this will create a unique value of i for each invocation of the loop (which is what you want).
Serializing with promises and async/await
If your async function returns a promise, and you want to serialize your async operations to run one after another instead of in parallel and you're running in a modern environment that supports async and await, then you have more options.
async function someFunction() {
    const j = 10;
    for (let i = 0; i < j; i++) {
        // wait for the promise to resolve before advancing the for loop
        await asynchronousProcess();
        console.log(i);
    }
}
This will make sure that only one call to asynchronousProcess() is in flight at a time and the for loop won't even advance until each one is done. This is different than the previous schemes that all ran your asynchronous operations in parallel so it depends entirely upon which design you want. Note: await works with a promise so your function has to return a promise that is resolved/rejected when the asynchronous operation is complete. Also, note that in order to use await, the containing function must be declared async.
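For the loop above (and the Promise.all() version below) to work, asynchronousProcess() must return a promise. A minimal, purely hypothetical stand-in for trying the pattern out:
function asynchronousProcess() {
    // hypothetical stand-in: resolves after a short random delay
    return new Promise(function(resolve) {
        setTimeout(resolve, Math.random() * 100);
    });
}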
Run asynchronous operations in parallel and use Promise.all() to collect results in order
function someFunction() {
    let promises = [];
    for (let i = 0; i < 10; i++) {
        promises.push(asynchronousProcessThatReturnsPromise());
    }
    return Promise.all(promises);
}

someFunction().then(results => {
    // array of results in order here
    console.log(results);
}).catch(err => {
    console.log(err);
});
async/await is here
(ES2017), so you can do this kind of thing very easily now.
async function run() {
    // await is only valid inside an async function
    var j = 10;
    for (var i = 0; i < j; i++) {
        await asycronouseProcess();
        alert(i);
    }
}
Remember, this works only if asycronouseProcess returns a Promise.
If asycronouseProcess is not in your control, you can make it return a Promise yourself, like this:
function asyncProcess() {
    return new Promise((resolve, reject) => {
        asycronouseProcess(() => {
            resolve();
        });
    });
}
Then replace the line await asycronouseProcess(); with await asyncProcess();.
Understanding Promises before even looking into async/await is a must.
(Also read about support for async/await.)
Any recommendation on how to fix this?
Several. You can use bind:
for (i = 0; i < j; i++) {
    asycronouseProcess(function (i) {
        alert(i);
    }.bind(null, i));
}
Or, if your browser supports let (it will be in the next ECMAScript version; Firefox has already supported it for a while), you could write:
for (i = 0; i < j; i++) {
    let k = i;
    asycronouseProcess(function() {
        alert(k);
    });
}
Or, you could do the job of bind manually (in case the browser doesn't support it; in that case you could also use a shim, which should be in the link above):
for (i = 0; i < j; i++) {
    asycronouseProcess(function(i) {
        return function () {
            alert(i);
        };
    }(i));
}
I usually prefer let when I can use it (e.g., for a Firefox add-on); otherwise bind or a custom currying function (one that doesn't need a context object).
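Such a currying helper can be tiny. A sketch (curry is a hypothetical name, not a library function): it captures the current value of i at call time and prepends it to the callback's arguments.
function curry(fn) {
    // capture the arguments after fn at the time curry() is called
    var args = Array.prototype.slice.call(arguments, 1);
    return function () {
        return fn.apply(null, args.concat(Array.prototype.slice.call(arguments)));
    };
}

for (i = 0; i < j; i++) {
    asycronouseProcess(curry(function (i) {
        alert(i);
    }, i));
}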
Here is a sample approach that serializes the iterations through recursion: each pass schedules the next one only after it finishes.
var i = 0;
var length = 10;

function for1() {
    console.log(i);
    for2();
}

function for2() {
    if (i == length) {
        return false;
    }
    setTimeout(function() {
        i++;
        for1();
    }, 500);
}

for1();
ES2017: you can wrap the async code inside a function (say, XHRpost) that returns a promise (the async code lives inside the promise).
Then call that function (XHRpost) inside the for loop, with the magical await keyword. :)
let http = new XMLHttpRequest();
let url = 'http://sumersin/forum.social.json';

function XHRpost(i) {
    return new Promise(function(resolve) {
        let params = 'id=nobot&%3Aoperation=social%3AcreateForumPost&subject=Demo' + i + '&message=Here%20is%20the%20Demo&_charset_=UTF-8';
        http.open('POST', url, true);
        http.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
        http.onreadystatechange = function() {
            console.log("Done " + i + "<<<<>>>>>" + http.readyState);
            if (http.readyState == 4) {
                console.log('SUCCESS :', i);
                resolve();
            }
        };
        http.send(params);
    });
}

(async () => {
    for (let i = 1; i < 5; i++) {
        await XHRpost(i);
    }
})();
JavaScript code runs on a single thread, so you fundamentally cannot block and wait for the first loop iteration to complete before beginning the next without seriously impacting page usability.
The solution depends on what you really need. If the example is close to exactly what you need, @Simon's suggestion to pass i to your async process is a good one.

Gulp default task unable to compress after copy

At first I thought this was related to task dependencies, so I went with run-sequence and even tried defining dependencies within the tasks themselves. But I cannot get the compress task to run after copy. Even when it says the compress task finished, the compression only works if I run compress by itself in the task runner inside Visual Studio. What else can I try to get it to compress after copy?
/// <binding BeforeBuild='default' />
/*
This file is the main entry point for defining Gulp tasks and using Gulp plugins.
Click here to learn more. https://go.microsoft.com/fwlink/?LinkId=518007
*/
var gulp = require("gulp");
var debug = require("gulp-debug");
var del = require("del");
var uglify = require("gulp-uglify");
var pump = require("pump");
var runSequence = require("run-sequence");
var paths = {
bower: "./bower_components/",
lib: "./Lib/"
};
var modules = {
"store-js": ["store-js/dist/store.legacy.js"],
"bootstrap-select": [
"bootstrap-select/dist/css/bootstrap-select.css",
"bootstrap-select/dist/js/bootstrap-select.js",
"bootstrap-select/dist/js/i18n/*.min.js"
]
}
gulp.task("default", function (cb) {
runSequence("clean", ["copy", "compress"], cb);
});
gulp.task("clean",
function () {
return del.sync(["Lib/**", "!Lib", "!Lib/ReadMe.md"]);
});
gulp.task("compress",
function (cb) {
pump([
gulp.src(paths.lib + "**/*.js"),
uglify(),
gulp.dest(paths.lib)
], cb);
});
gulp.task("copy",
function (cb) {
prefixPathToModules();
copyModules();
cb();
});
function prefixPathToModules() {
for (var moduleIndex in modules) {
for (var fileIndex in modules[moduleIndex]) {
modules[moduleIndex][fileIndex] = paths.bower + modules[moduleIndex][fileIndex];
}
}
}
function copyModules() {
for (var files in modules) {
gulp.src(modules[files], { base: paths.bower })
.pipe(gulp.dest(paths.lib));
}
}
You use run-sequence, and your code
runSequence("clean", ["copy", "compress"], cb);
runs in this order:
clean
copy and compress in parallel (that's why your code compresses nothing: the files have not been copied yet)
cb
Write it like this and compress will run after copy:
runSequence("clean", "copy", "compress", cb);
I am not familiar with runSequence, but why don't you try the following? This way your default task depends on compress, and compress depends on copy, so 'copy' will run first and then 'compress':
gulp.task('default', ['copy', 'compress'], function (cb) {});
gulp.task('compress', ['copy'], function (cb) {});
Gulp returns a stream; since you are calling gulp.src() inside a for loop, the individual streams are discarded and the task has nothing to return, so gulp cannot tell when copying has finished.
Update your copyModules to the following; then you can either use runSequence as posted by Kirill or follow my approach.
function copyModules() {
    var inputFileArr = [];
    for (var files in modules) {
        inputFileArr = inputFileArr.concat(modules[files]);
    }
    return gulp.src(inputFileArr, { base: paths.bower })
        .pipe(gulp.dest(paths.lib));
}
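Alternatively (a sketch, assuming the merge-stream package), you can keep one gulp.src() per module and merge the streams, which preserves per-module handling:
var merge = require('merge-stream');

function copyModules() {
    var merged = merge();
    for (var files in modules) {
        // add each module's copy stream to the merged stream
        merged.add(gulp.src(modules[files], { base: paths.bower })
            .pipe(gulp.dest(paths.lib)));
    }
    // the merged stream ends only after every copy has finished
    return merged;
}
Either way, the copy task should return the stream (return copyModules();) instead of calling cb() immediately, so that run-sequence or the task dependency can actually wait for it.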

Using gulp-minify-html and gulp-html-replace together

I am using Gulp with gulp-minify-html and gulp-html-replace:
var minifyhtml = require('gulp-minify-html');
var htmlreplace = require('gulp-html-replace');

var dev_paths = {
    HTML: dev + '/**/*.html'
};
var prod_paths = {
    RELATIVE_CSS: ['css/bootstrap.css', 'css/font-awesome.css', 'css/c3.css', 'css/main.css'],
};

//Compress HTML
gulp.task('minify-html', function () {
    var opts = {
        empty: true,
        comments: true
    };
    return gulp.src(dev_paths.HTML)
        .pipe(minifyhtml(opts))
        .pipe(gulp.dest(prod + '/'));
});

//Add call to the JS and CSS in the HTML files
gulp.task('replace-files', function () {
    return gulp.src(dev_paths.HTML)
        .pipe(htmlreplace({
            'css': prod_paths.RELATIVE_CSS,
            'js': 'js/script.js'
        }))
        .pipe(gulp.dest('public/prod/'));
});

gulp.task('prod', ['replace-files', 'minify-html'], function () {
});
However, the output HTML doesn't have the CSS and JS references replaced as specified in the replace-files task. When I run gulp without the minify-html task, it works fine.
Does anyone know why running the replace-files and minify-html tasks together does not work?
Thank you.
As the tasks run in parallel, it is likely the minify-html task is running before the replace-files task is complete.
Try using run-sequence to ensure the tasks run in the required order.
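Alternatively, since both plugins are plain stream transforms, you can sidestep the ordering problem by chaining them in a single task. A sketch based on the paths in the question, assuming both tasks are meant to write to the same prod output directory:
gulp.task('prod', function () {
    return gulp.src(dev_paths.HTML)
        .pipe(htmlreplace({
            'css': prod_paths.RELATIVE_CSS,
            'js': 'js/script.js'
        }))
        // minify the already-replaced HTML, so order is guaranteed
        .pipe(minifyhtml({ empty: true, comments: true }))
        .pipe(gulp.dest(prod + '/'));
});
Running one stream through htmlreplace and then minifyhtml guarantees the replaced HTML is what gets minified.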

Repeatedly Grab DOM in Chrome Extension

I'm trying to teach myself how to write Chrome extensions and ran into a snag when I realized that my jQuery was breaking because it was getting information from the extension page itself and not the tab's current page like I had expected.
Quick summary: my sample extension will refresh the page every x seconds, look at the contents/DOM, and then do some stuff with it. The first and last parts are fine, but getting the DOM from the page that I'm on has proven very difficult, and the documentation hasn't been terribly helpful for me.
You can see the code that I have so far at these links:
Current manifest
Current js script
Current popup.html
If I want to have the ability to grab the DOM on each cycle of my setInterval call, what more needs to be done? I know that, for example, I'll need to have a content script. But do I also need to specify a background page in my manifest? Where do I need to call the content script within my extension? What's the easiest/best way to have it communicate with my current js file on each reload? Will my content script also be expecting me to use jQuery?
I know that these questions are basic and will seem trivial to me in retrospect, but they've really been a headache trying to explore completely on my own. Thanks in advance.
In order to access the web page's DOM, you'll need to programmatically inject some code into it (using chrome.tabs.executeScript()).
That said, although it is possible to grab the DOM as a string, pass it back to your popup, load it into a new element and look for whatever you want, this is a really bad approach (for various reasons).
The best option (in terms of efficiency and accuracy) is to do the processing in the web page itself and then pass just the results back to the popup. Note that in order to be able to inject code into a web page, you have to include the corresponding host match pattern in the permissions property of your manifest.
What I describe above can be achieved like this:
editorMarket.js
var refresherID = 0;
var currentID = 0;

$(document).ready(function() {
    $('.start-button').click(function() {
        oldGroupedHTML = null;
        oldIndividualHTML = null;
        chrome.tabs.query({ active: true }, function(tabs) {
            if (tabs.length === 0) {
                return;
            }
            currentID = tabs[0].id;
            refresherID = setInterval(function() {
                chrome.tabs.reload(currentID, { bypassCache: true }, function() {
                    chrome.tabs.executeScript(currentID, {
                        file: 'content.js',
                        runAt: 'document_idle',
                        allFrames: false
                    }, function(results) {
                        if (chrome.runtime.lastError) {
                            alert('ERROR:\n' + chrome.runtime.lastError.message);
                            return;
                        } else if (results.length === 0) {
                            alert('ERROR: No results !');
                            return;
                        }
                        var nIndyJobs = results[0].nIndyJobs;
                        var nGroupJobs = results[0].nGroupJobs;
                        $('.lt').text('Indy: ' + nIndyJobs + '; '
                                + 'Grouped: ' + nGroupJobs);
                    });
                });
            }, 5000);
        });
    });

    $('.stop-button').click(function() {
        clearInterval(refresherID);
    });
});
content.js:
(function() {
    function getNumberOfIndividualJobs() {...}
    function getNumberOfGroupedJobs() {...}

    function comparator(grouped, individual) {
        var IndyJobs = getNumberOfIndividualJobs();
        var GroupJobs = getNumberOfGroupedJobs();
        nIndyJobs = IndyJobs[1];
        nGroupJobs = GroupJobs[1];
        console.log(GroupJobs);
        return {
            nIndyJobs: nIndyJobs,
            nGroupJobs: nGroupJobs
        };
    }

    var currentGroupedHTML = $(".grouped_jobs").html();
    var currentIndividualHTML = $(".individual_jobs").html();
    var result = comparator(currentGroupedHTML, currentIndividualHTML);
    return result;
})();
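For reference, the host match pattern mentioned earlier lives in the manifest's permissions array. A minimal sketch of the relevant entries (the name and the example.com pattern are placeholders, not from the original extension):
{
    "name": "DOM Refresher (placeholder)",
    "version": "1.0",
    "manifest_version": 2,
    "browser_action": {
        "default_popup": "popup.html"
    },
    "permissions": [
        "tabs",
        "*://*.example.com/*"
    ]
}
Also note that since content.js uses $, jQuery would need to be available in the page as well, e.g. by injecting it with an additional chrome.tabs.executeScript() call before content.js runs.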