Ending a gulp task that runs two different operations

I am kind of lacking imagination on this one.
My goal is to retrieve a JSON object so I can run a string replacement on all the files I want to translate. I have looked into a lot of translation libraries, but this approach is the best I can think of for my use case.
Anyway, my issue is this: once I have my JSON object, I have to run the replacement on all the files and, when that is done, finish the 'trad' task.
I have done some research and tried a lot of things, but there is something I am missing about the right way to do this.
Please help!
gulp.task('trad', gulp.series('createTradFile', 'copyBeforeTrad', function (done) {
    var data = require('gulp-data');
    var path = require('path');
    var fs = require('fs');
    var replace2 = require('gulp-string-replace');
    var chmod = require('gulp-chmod');
    var transObj = null;

    var translateAll = function () {
        var files = gulp.src(['fr/**/*.html', 'fr/**/*.js']);
        for (var k in transObj) {
            if (transObj[k].ID) {
                console.log("TRAD " + transObj[k].ID + " TO " + transObj[k].LANG1);
                files.pipe(replace2(new RegExp('\\+' + transObj[k].ID + '\\+', 'g'),
                                    transObj[k].LANG1,
                                    {'logs': {'enabled': true}}))
                     .pipe(chmod(755));
            }
        }
        files.pipe(gulp.dest("fr"))
             .on('end', done);
    };

    gulp.src('distTemp/wording.json')
        .pipe(data(function (file) {
            transObj = JSON.parse(fs.readFileSync('distTemp/' + path.basename(file.path)));
            console.log("TRAD first part OK");
            translateAll();
        }));
}));
So this code translates like I want it to, but the task never ends:
[16:38:34] The following tasks did not complete: trad, <anonymous>
[16:38:34] Did you forget to signal async completion?

So, after a bit of research I found this (admittedly clunky) solution, which does the trick (please answer if you have a better one):
var transObj = null;

gulp.task("retrieveTradObject", function () {
    var data = require('gulp-data');
    var path = require('path');
    var fs = require('fs');
    return gulp.src('distTemp/wording.json')
        .pipe(data(function (file) {
            transObj = JSON.parse(fs.readFileSync('distTemp/' + path.basename(file.path)));
            console.log("TRAD first part OK");
        }));
});

gulp.task('trad', gulp.series('createTradFile', 'copyBeforeTrad', 'retrieveTradObject', function () {
    var replace2 = require('gulp-string-replace');
    var chmod = require('gulp-chmod');
    var files = gulp.src(['fr/**/*.html', 'fr/**/*.js']);
    for (var k in transObj) {
        if (transObj[k].ID) {
            console.log("TRAD " + transObj[k].ID + " TO " + transObj[k].LANG1);
            files = files.pipe(replace2(new RegExp('\\+' + transObj[k].ID + '\\+', 'g'),
                                        transObj[k].LANG1,
                                        {'logs': {'enabled': true}}))
                         .pipe(chmod(755));
        }
    }
    return files.pipe(gulp.dest("fr"));
}));
So the main idea here was to separate the two promises into their own tasks (mainly so the code is easier to understand later) and then to do files = files.pipe(...), which is explained here: How to create repeating pipe in gulp?
Hope this helps!
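For reference, since wording.json can simply be read synchronously, a more compact variant could skip gulp-data and the extra task entirely and just return the final stream, which is enough to signal async completion. This is only a sketch against the same 'createTradFile' and 'copyBeforeTrad' tasks and file layout as above, not something I have battle-tested:

gulp.task('trad', gulp.series('createTradFile', 'copyBeforeTrad', function () {
    var fs = require('fs');
    var replace2 = require('gulp-string-replace');
    var chmod = require('gulp-chmod');

    // Read the translation table synchronously; no extra task needed.
    var transObj = JSON.parse(fs.readFileSync('distTemp/wording.json'));

    var files = gulp.src(['fr/**/*.html', 'fr/**/*.js']);
    for (var k in transObj) {
        if (transObj[k].ID) {
            files = files.pipe(replace2(new RegExp('\\+' + transObj[k].ID + '\\+', 'g'),
                                        transObj[k].LANG1,
                                        {'logs': {'enabled': true}}))
                         .pipe(chmod(755));
        }
    }
    // Returning the stream lets gulp know when the task is done.
    return files.pipe(gulp.dest('fr'));
}));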

I'm not sure I understand the question 100%, so I'll take the downvotes, but are you talking about something like gulp-run-sequence?
You can do all sorts of task sequencing like this:
var gulp = require('gulp');
// webp images for optimization on some browsers
const webp = require('gulp-webp');
// responsive images!
var responsive = require('gulp-responsive-images');
// gulp delete for cleaning
var del = require('del');
// run-sequence to make sure each gulp command completes in the right order
var runSequence = require('run-sequence');

// =======================================================================//
// !                     Default and bulk tasks                           //
// =======================================================================//
// 'default' runs when the user types 'gulp' into the CLI.
// First 'clean' is run, then 'webp', then the rest are run in parallel.
// If you want something run after, you can add something like 'example'.
gulp.task('default', function (callback) {
    runSequence('clean', 'webp', ['responsive-jpg', 'responsive-webp', 'copy-data', 'copy-sw'], 'example', callback);
});

// =======================================================================//
//                          Images and fonts                              //
// =======================================================================//
gulp.task('responsive-jpg', function () {
    return gulp.src('src/images/*')
        .pipe(responsive({
            '*.jpg': [
                {width: 1600, suffix: '_large_1x', quality: 40},
                {width: 800, suffix: '_medium_1x', quality: 70},
                {width: 550, suffix: '_small_1x', quality: 100}
            ]
        }))
        .pipe(gulp.dest('build/images'));
});

gulp.task('responsive-webp', function () {
    return gulp.src('src/images/*')
        .pipe(responsive({
            '*.webp': [
                {width: 1600, suffix: '_large_1x', quality: 40},
                {width: 800, suffix: '_medium_1x', quality: 70},
                {width: 550, suffix: '_small_1x', quality: 80}
            ]
        }))
        .pipe(gulp.dest('build/images'));
});

gulp.task('webp', () =>
    gulp.src('src/images/*.jpg')
        .pipe(webp())
        .pipe(gulp.dest('src/images'))
);

gulp.task('copy-data', function () {
    return gulp.src('./src/data/*.json')
        .pipe(gulp.dest('./build/data'));
});

gulp.task('copy-sw', function () {
    return gulp.src('./src/sw.js')
        .pipe(gulp.dest('./build/'));
});
In my example here, I clear out old files, then I convert any images that need to be converted to webp, then I run the tasks that can safely run together in parallel. You can arrange this however you need. You could even create a gulp task that points at two run-sequence tasks to chain things further, as sketched below.
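For example, such a task might look like this (just a sketch; 'publish' is a hypothetical task standing in for whatever should run afterwards):

// Run the whole 'default' sequence above, then a hypothetical 'publish' task.
gulp.task('deploy', function (callback) {
    runSequence('default', 'publish', callback);
});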

Related

gulp-concat is adding the same files twice

I'm seeing a similar issue to this post (gulp-concat twice the content).
However, I'm dumping the concatenated file into a different directory, so the task shouldn't be pulling the resulting concatenated file back in, yet the contents of each file are being doubled for some reason.
My gulpfile is as follows:
/* jshint node: true */
module.exports = function (gulp, options, plugins) {
    var merge = require('merge-stream');
    var uglify = require('gulp-uglify');
    var pump = require('pump');
    var gp_concat = require('gulp-concat');
    var gp_rename = require('gulp-rename');
    var gp_ignore = require('gulp-ignore');
    var ngAnnotate = require('gulp-ng-annotate');
    var paths = require('../paths');
    var utils = require('../utils');

    var base = [
        paths.APP,
        paths.ETC,
        paths.DESIGN
    ];

    gulp.task('scripts:clean', function () {
        var srcOptions = {
            read: false
        };
        var tasks = base.map(function (folder) {
            return gulp.src(folder + '/**/' + paths.GENERATED_SUBPATH + '/js/**/*.js', srcOptions)
                .pipe(plugins.clean({force: true}));
        });
        return merge(tasks);
    });

    gulp.task('compress', function () {
        var filesToInclude = ['**/app/components/**/*.js'];
        var excludeCondition = '**/*.spec*.js';
        var fileToDest = paths.GLOBAL + '/' + paths.GENERATED_SUBPATH + '/js';
        return gulp.src(filesToInclude)
            .pipe(gp_ignore.exclude(excludeCondition))
            .pipe(ngAnnotate({add: true}))
            .pipe(gp_concat('all.concat.js'))
            .pipe(gulp.dest('dist'))
            .pipe(gp_rename('all.min.js'))
            .pipe(uglify())
            .pipe(gulp.dest(fileToDest));
    });

    gulp.task('scripts:build', ['scripts:clean', 'compress']);
};
Can someone help me understand why
var filesToInclude = ['**/app/components/**/*.js'];
would bring in each file twice? I've checked the files and no, they are not duplicated anywhere in there.
It turned out the issue was with the definition of filesToInclude starting with a wildcard. Since fileToDest puts the output in a separate target directory but with the same structure, the glob picks up the already-generated output as well, so the process ends up reading each file twice.
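If that is the case, one way to guard against it is to exclude the output locations from the source glob. A sketch, assuming the output lives under 'dist' and the paths.GENERATED_SUBPATH folders as in the gulpfile above:

var filesToInclude = [
    '**/app/components/**/*.js',                // the sources we actually want
    '!dist/**',                                 // exclude the concatenated output
    '!**/' + paths.GENERATED_SUBPATH + '/**'    // exclude generated bundles (project-specific)
];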

Gulp default task unable to compress after copy

At first I thought this was related to task dependencies, so I went with run-sequence and even tried defining dependencies within the tasks themselves. But I cannot get the compress task to run after copy. Even when it says the compress task finished, the compression only actually works if I run compress by itself from the task runner inside Visual Studio. What else can I try to get it to compress after copy?
/// <binding BeforeBuild='default' />
/*
This file is the main entry point for defining Gulp tasks and using Gulp plugins.
Click here to learn more. https://go.microsoft.com/fwlink/?LinkId=518007
*/
var gulp = require("gulp");
var debug = require("gulp-debug");
var del = require("del");
var uglify = require("gulp-uglify");
var pump = require("pump");
var runSequence = require("run-sequence");

var paths = {
    bower: "./bower_components/",
    lib: "./Lib/"
};

var modules = {
    "store-js": ["store-js/dist/store.legacy.js"],
    "bootstrap-select": [
        "bootstrap-select/dist/css/bootstrap-select.css",
        "bootstrap-select/dist/js/bootstrap-select.js",
        "bootstrap-select/dist/js/i18n/*.min.js"
    ]
};

gulp.task("default", function (cb) {
    runSequence("clean", ["copy", "compress"], cb);
});

gulp.task("clean", function () {
    return del.sync(["Lib/**", "!Lib", "!Lib/ReadMe.md"]);
});

gulp.task("compress", function (cb) {
    pump([
        gulp.src(paths.lib + "**/*.js"),
        uglify(),
        gulp.dest(paths.lib)
    ], cb);
});

gulp.task("copy", function (cb) {
    prefixPathToModules();
    copyModules();
    cb();
});

function prefixPathToModules() {
    for (var moduleIndex in modules) {
        for (var fileIndex in modules[moduleIndex]) {
            modules[moduleIndex][fileIndex] = paths.bower + modules[moduleIndex][fileIndex];
        }
    }
}

function copyModules() {
    for (var files in modules) {
        gulp.src(modules[files], { base: paths.bower })
            .pipe(gulp.dest(paths.lib));
    }
}
You use run-sequence, and your code
runSequence("clean", ["copy", "compress"], cb);
runs in this order:
clean
copy and compress in parallel // that's why your code compresses nothing: you have not copied the files yet
cb
Write it like this and compress will run after copy:
runSequence("clean", "copy", "compress", cb);
I am not familiar with runSequence, but why don't you try the following? This way your default task depends on copy and compress, and compress depends on copy, so 'copy' will run first and then 'compress'.
gulp.task('default', ['copy', 'compress'], function (cb) {});
gulp.task('compress', ['copy'], function (cb) {});
gulp.src returns a stream, and because you are calling it inside a for loop those streams are never returned from copyModules, so the copy task signals completion before the files have actually been written.
Update your copyModules to the following, and then you can either use runSequence as posted by Kirill or follow my approach:
function copyModules() {
    var inputFileArr = [];
    for (var files in modules) {
        inputFileArr = inputFileArr.concat(modules[files]);
    }
    return gulp.src(inputFileArr, { base: paths.bower })
        .pipe(gulp.dest(paths.lib));
}
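With copyModules returning its stream, the dependency-based approach could then be wired up like this. This is only a sketch under gulp 3.x semantics, reusing the tasks from the question, and not tested against the Visual Studio task runner:

// 'copy' waits for 'clean' and returns the stream, so gulp knows when it has finished.
gulp.task("copy", ["clean"], function () {
    prefixPathToModules();
    return copyModules();
});

// 'compress' only starts once its dependency 'copy' has completed.
gulp.task("compress", ["copy"], function (cb) {
    pump([
        gulp.src(paths.lib + "**/*.js"),
        uglify(),
        gulp.dest(paths.lib)
    ], cb);
});

gulp.task("default", ["compress"]);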

Gulp Browserify with glob and uglify/factor-bundle

I'm currently getting into browserify. I like it so far, but before I start using it I want to automate the build. Gulp is my build system of choice.
So what I actually want to do is:
Take js/app/**.js, bundle it into js/bundle/, and extract common dependencies into js/bundle/common.js. In addition, uglify everything and add source maps.
Well, the gulp support for browserify seems rather poor; at least my Google research was pretty disappointing.
Anyway, here is what I've got so far.
var gulp = require('gulp'),
    browserify = require('browserify'),
    factor = require('factor-bundle'),
    source = require('vinyl-source-stream');
// ...
// gulp task
return browserify({
        entries: ['js/app/page1.js', 'js/app/page2.js'],
        debug: true
    })
    .plugin(factor, {
        o: ['js/bundle/page1.js', 'js/bundle/page2.js']
    })
    .bundle()
    .pipe(source('common.js'))
    .pipe(gulp.dest('js/bundle/'));
Well, this is neither uglifying nor adding source maps, much less using a glob pattern. I can find an official recipe that shows how to use the pipe to add additional transformations like uglify, but it only covers a single file.
As the outputs parameter to factor-bundle, use streams instead of file paths. You can then do whatever you want with the streams:
var indexStream = source("index.js");
var testStream = source("tests.js");

var commonStream = bundler.plugin('factor-bundle', { outputs: [indexStream, testStream] })
    .bundle()
    .pipe(source('common.js'));

return merge(indexStream, commonStream, testStream)
    .pipe(buffer())
    .pipe(sourcemaps.init({ debug: true, loadMaps: true }))
    .pipe(uglify())
    .pipe(gulp.dest('js/bundle/'));
Thanks to Liero's answer, I got something very similar working. Here's the complete gulpfile:
const gulp = require('gulp');
const browserify = require('browserify');
const factor = require('factor-bundle');
const source = require('vinyl-source-stream');
const sourcemaps = require('gulp-sourcemaps');
const buffer = require('gulp-buffer');
const merge = require('gulp-merge');

gulp.task('bfb', function () {
    const fejs = 'public/javascripts/'; // location of source JS
    const fejsb = fejs + 'b/';          // location of bundles
    const modules = [                   // aka entry points
        'accounts',
        'invoice',
        'invoices',
        // etc...
    ];

    const inputs = [];
    const streams = [];
    modules.forEach(function (module) {
        inputs.push(fejs + module + '.js');
        streams.push(source(module + '.js'));
    });

    const bundler = browserify(inputs, {});
    const commonStream = bundler.plugin(factor, { outputs: streams })
        .bundle()
        .pipe(source('common.js'));
    streams.push(commonStream);

    return merge(streams)
        .pipe(buffer())
        .pipe(sourcemaps.init({ loadMaps: true }))
        //.pipe(uglify()) // haven't tested this bit
        .pipe(sourcemaps.write('./'))
        .pipe(gulp.dest(fejsb));
});

Using gulp-minify-html and gulp-html-replace together

I am using Gulp with gulp-minify-html and gulp-html-replace:
var minifyhtml = require('gulp-minify-html');
var htmlreplace = require('gulp-html-replace');

var dev_paths = {
    HTML: dev + '/**/*.html'
};
var prod_paths = {
    RELATIVE_CSS: ['css/bootstrap.css', 'css/font-awesome.css', 'css/c3.css', 'css/main.css'],
};

// Compress HTML
gulp.task('minify-html', function () {
    var opts = {
        empty: true,
        comments: true
    };
    return gulp.src(dev_paths.HTML)
        .pipe(minifyhtml(opts))
        .pipe(gulp.dest(prod + '/'));
});

// Add the calls to the JS and CSS in the HTML files
gulp.task('replace-files', function () {
    gulp.src(dev_paths.HTML)
        .pipe(htmlreplace({
            'css': prod_paths.RELATIVE_CSS,
            'js': 'js/script.js'
        }))
        .pipe(gulp.dest('public/prod/'));
});

gulp.task('prod', ['replace-files', 'minify-html'], function () {
});
However, the HTML doesn't end up with the CSS and JS files I specified in the replace-files task. When I run gulp without the minify-html task, it works fine.
Does anyone know why using the replace-files and minify-html tasks together does not work?
Thank you.
As the tasks run in parallel, it is likely the 'minify-html' task is running before the 'replace-files' task is complete and overwriting its output.
Try using run-sequence to ensure the tasks run in the required order.
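An alternative that sidesteps the ordering problem entirely is to chain both plugins in a single task, so the replacement and the minification happen in one stream. This is only a sketch based on the paths above ('prod-html' is a made-up task name), and note the stream is returned so gulp can signal completion; whichever approach you pick, 'replace-files' should also return its stream for the same reason:

gulp.task('prod-html', function () {
    var opts = {
        empty: true,
        comments: true
    };
    return gulp.src(dev_paths.HTML)
        .pipe(htmlreplace({
            'css': prod_paths.RELATIVE_CSS,
            'js': 'js/script.js'
        }))
        .pipe(minifyhtml(opts))       // minify the already-replaced HTML
        .pipe(gulp.dest('public/prod/'));
});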

Running pipeline section multiple times with different arguments for concat step

I'm a fan of the files object format
files: {
    'dest/a.js': ['src/aa.js', 'src/aaa.js'], // key: value
    'dest/a1.js': ['src/aa1.js', 'src/aaa1.js'],
}
I have a gulp task that concats source files like this:
gulp.task('cat', function() {
    gulp.src( <value-goes-here> )
        .
        <many pipeline steps>
        .
        .pipe(concat(<key-goes-here>))
        .pipe(gulp.dest('target/'))
        .
        <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
        .
});
Is there a streaming way to extend this task so that I get one bundle file for each key-value pair in files?
I would like NOT to create one task per key-value pair, as I want to continue piping more steps even after the last .pipe(gulp.dest('target/')).
If I'm approaching this problem in the wrong way, is there a better way?
Thank you in advance!
Rob Rich's answer works. Here's a working version:
var Q = require('q');
var gulp = require('gulp');
var concat = require('gulp-concat');

var files = {
    'a.js': ['src/aa.js', 'src/aaa.js'],
    'a1.js': ['src/aa1.js', 'src/aaa1.js'],
};

gulp.task('cat', function () {
    var promises = Object.keys(files).map(function (key) {
        var deferred = Q.defer();
        var val = files[key];
        console.log(val);
        gulp.src(val)
            .pipe(concat(key))
            .pipe(gulp.dest('dest/'))
            .on('end', function () {
                deferred.resolve();
            });
        return deferred.promise;
    });
    return Q.all(promises);
});
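On newer Node versions the same thing works without the Q dependency, using native Promises. A sketch, assuming the same files map as above:

gulp.task('cat', function () {
    var promises = Object.keys(files).map(function (key) {
        return new Promise(function (resolve, reject) {
            gulp.src(files[key])
                .pipe(concat(key))
                .pipe(gulp.dest('dest/'))
                .on('end', resolve)     // resolve once this bundle is written
                .on('error', reject);
        });
    });
    // gulp treats the returned promise as the task's completion signal.
    return Promise.all(promises);
});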
Try this:
var Q = require('q');

gulp.task('cat', function () {
    var promises = Object.keys(files).map(function (key) {
        var deferred = Q.defer();
        var val = files[key];
        gulp.src(val)
            .
            <many pipeline steps>
            .
            .pipe(concat(key))
            .pipe(gulp.dest('target/'))
            .
            <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
            .
            .on('end', function () {
                deferred.resolve();
            });
        return deferred.promise;
    });
    return Q.all(promises);
});
You can also accomplish a similar result using streams instead of promises, with the combined-stream or stream-combiner packages.
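For instance, with merge-stream (used earlier in this thread; a sketch of the stream-based variant rather than the exact combined-stream/stream-combiner API), merging the per-key streams and returning the result is enough for gulp to wait for all of them:

var merge = require('merge-stream');

gulp.task('cat', function () {
    var streams = Object.keys(files).map(function (key) {
        return gulp.src(files[key])
            .pipe(concat(key))
            .pipe(gulp.dest('dest/'));
    });
    // Returning the merged stream signals completion once every bundle is written.
    return merge(streams);
});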