Gulp task module export declaration - gulp

A lot of gulp setup examples use the CommonJS pattern. Gulp tasks are defined as follows:
myGulpTask.js
const gulp = require('gulp');

// Source and destination folders for the task.
const paths = {
  src: './src',   // fixed: object literal properties use `:`, not `=`
  dest: './dest'
};

// A named task function; returning the stream lets gulp know when the
// task has finished.
const myGulpTask = function () {
  return gulp.src(paths.src) // fixed: was `paths.srcFoo`, which is undefined
    .pipe() // do stuff
    .pipe(gulp.dest(paths.dest));
};

// Export the task function and also register it under an explicit name.
module.exports = myGulpTask;
gulp.task('my-gulp-task', myGulpTask);
This allows you to use this gulp task using:
$ npm run gulp myGulpTask
But since the task is directly assigned, would it make sense to define the export as follows:
//...
// Assign the function to a const and export it in a single statement.
// `module.exports = fn` evaluates to fn, so `myGulpTask` and the export
// reference the exact same function object.
const myGulpTask = module.exports = function () {
  return gulp.src(paths.src) // fixed: was `paths.srcFoo`, which is undefined
    .pipe() // do stuff
    .pipe(gulp.dest(paths.dest));
};
gulp.task('my-gulp-task', myGulpTask);
//...
Maybe it's sweating the small stuff, or is there a difference in these two module declarations?

There is no functional difference between the two; the first one is friendlier and easier to read.

Related

Gulp rollup in parallel task and series

I have a gulpfile as below with a rollupTask. But in the last task, zipTask, the output does not include the bundled js from rollup. The only way I have found to fix this is to add a wait time before the zip task. There seems to be a fraction of a delay between the rollup output and the next task in the gulp series. Is this the expected behavior, or is there a fix that does not require adding a waiting time? Is my rollupTask correct? The zip task simply zips the output folder into a different folder. The output folder itself contains the expected bundle.
// `series` and `parallel` are used in the export below, so pull them
// from gulp explicitly.
const { series, parallel } = require('gulp');
// The rollup package exposes a named `rollup` function; the module
// object itself is not callable, so destructure the import.
const { rollup } = require('rollup');

/**
 * Bundle index.js with Rollup's JS API.
 * The returned promise resolves only after write() and close() have
 * completed, which is how gulp's series() knows the task is done.
 */
async function rollupTask() {
  const rollupBuild = await rollup({
    input: 'index.js',
    plugins: rollupPlugins, // assumed to be defined elsewhere in the gulpfile
  });
  await rollupBuild.write({
    file: 'bundle.js',
    format: 'es',
    sourcemap: true,
  });
  await rollupBuild.close();
}

// taskOne/taskTwo/taskThree/zipTask are defined elsewhere in the gulpfile.
exports.default = series(taskOne, parallel(taskTwo, taskThree, rollupTask), zipTask);
The easiest way is to use a plugin written for gulp. gulp-rollup
// `series` must be imported from gulp before it is used below.
const { src, dest, series } = require('gulp');
const rollup = require('gulp-rollup');

// Transform the ./src JS files with Rollup and write the result to ./dist.
function rollupTask() {
  const options = { input: './src/main.js' }; // any option supported by Rollup can be set here.
  return src('./src/**/*.js')
    .pipe(rollup(options)) // transform the files here.
    .pipe(dest('./dist'));
}

exports.build = series(rollupTask);
On start: gulp build

Why is Gulp concatenating my output in the wrong order?

As shown in the following gulpfile.js, I am trying to compile jQuery, bootstrap.js, and a collection of Javascript snippets from a subfolder into a single app.js output file. It is working, except that the snippets from the subfolder are appearing at the top of the app.js output file, prior to jQuery being loaded.
How can I ensure that these files are output in the correct order?
const { src, dest, watch, series, parallel } = require('gulp');
const concat = require('gulp-concat');
const uglify = require('gulp-uglify');
var merge2 = require('merge2');
// Source files listed in the order they should appear in the bundle,
// plus the output directory.
const files = {
jsSrcPath: [
'../node_modules/jquery/dist/jquery.js',
'../node_modules/bootstrap/dist/js/bootstrap.js',
'js/*.js'
],
jsDstPath: '../public/js'
}
// Concatenate one src() stream per entry into app.js, then minify.
// NOTE(review): the merged streams emit files as they become available,
// so the concatenation order is not guaranteed to follow jsSrcPath —
// this is the ordering problem described in the question above.
function jsTask(){
return merge2(files.jsSrcPath.map(function (file) {
return src(file)
}))
.pipe(concat('app.js'))
.pipe(uglify())
.pipe(dest(files.jsDstPath));
}
// Re-run jsTask whenever any of the source files change.
function watchTask(){
watch(files.jsSrcPath, jsTask);
}
// Build once, then keep watching.
exports.default = series(
jsTask,
watchTask
);
There's something internal going on here; in my tests I saw the order was sometimes random, sometimes based on modification time, and sometimes correct. In any case, it's best to use a tool to ensure our streams are always in the order we want them.
gulp-order exists for this purpose. It can take specific paths and glob syntax, which you already have, so you can re-pass that to the plugin:
const { src, dest, watch, series, parallel } = require('gulp');
const concat = require('gulp-concat');
const uglify = require('gulp-uglify');
const order = require('gulp-order'); // Added
var merge2 = require('merge2');
// Source files in the desired bundle order, plus the output directory.
const files = {
jsSrcPath: [
'../node_modules/jquery/dist/jquery.js',
'../node_modules/bootstrap/dist/js/bootstrap.js',
'js/*.js'
],
jsDstPath: 'dist'
}
// Same pipeline as before, but gulp-order re-sorts the merged stream so
// files come out in jsSrcPath order regardless of how the individual
// src() streams interleave.
function jsTask() {
return merge2(files.jsSrcPath.map(function (file) {
return src(file)
}))
// NOTE(review): gulp-order matches against each file's relative path;
// patterns that start with '../' may need the plugin's { base } option
// to match — verify against your folder layout.
.pipe(order(files.jsSrcPath)) // Added
.pipe(concat('app.js'))
.pipe(uglify())
.pipe(dest(files.jsDstPath));
}
// Re-run jsTask whenever any of the source files change.
function watchTask() {
watch(files.jsSrcPath, jsTask);
}
exports.default = series(
jsTask,
watchTask
);

Using package.json dependencies as folderstructure in gulpfile.js

I'm trying to create an automated process for including node modules in my projects. Some modules include css, so I'm trying to write my gulpfile.js in a way that it can read those modules and include their css.
I try to be selective and have only the folders selected that I install as a dependency. Not the entire node_modules folder.
My gulpfile.js:
// Import (not showing all for the sake of the question)
const sourcemaps = require('gulp-sourcemaps');
const sass = require('gulp-sass');
const postcss = require('gulp-postcss');
const autoprefixer = require('autoprefixer');
const cssnano = require('cssnano');
const fs = require('fs');
// Parsed package.json; `json.dependencies` is an object keyed by module name.
const json = JSON.parse(fs.readFileSync('./package.json'));
// File paths
const files = {
cssPath: 'assets/styles/**/*.scss',
jsPath: 'assets/scripts/**/*.js',
imgPath: 'assets/images/**/*',
// BUG (the subject of this question): `+` coerces both the array and the
// dependencies object to strings, producing the literal glob
// 'node_modules[object Object]/**/*.scss', which matches nothing.
modulesPath: ['node_modules']+json.dependencies+'/**/*.scss'
//Desired output: node_modules/module_name/all_folders/all.scss
}
// Compile CSS: Sass -> autoprefix -> minify, with external sourcemaps,
// written to dist/styles.
function styles(){
return src([files.cssPath, files.modulesPath])
.pipe(sourcemaps.init())
.pipe(sass())
.pipe(postcss([ autoprefixer(), cssnano() ]))
.pipe(sourcemaps.write('.'))
.pipe(dest('dist/styles')
);
}
When I run "gulp styles" the function runs just fine, but the desired styles are not included. What am I doing wrong?
First, there's an easier way to get your package.json file:
// NOTE(review): `package` is a reserved word in strict mode / ES modules;
// it works in sloppy-mode CommonJS, but a name such as `pkg` is safer.
const package = require('./package.json');
Then you need the names of your dependencies, which are the keys in the dependencies object. Map these to a glob, like this:
const files = {
...
// Build one glob per dependency name, e.g. 'node_modules/bootstrap/**/*.scss'.
modulesPath: Object.keys(package.dependencies).map(module => `node_modules/${module}/**/*.scss`)
}
Lastly, destructure that array in your styles task:
// Spread the array of per-module globs alongside the project's own glob.
return src([files.cssPath, ...files.modulesPath])

Gulp default task unable to compress after copy

At first I thought this was related to dependency of tasks so I went with run-sequence and even tried defining dependencies within tasks themselves. But I cannot get the compress task to run after copy. Or, even if it says it did finish the compress task, the compression only works if I run compress in the task runner inside visual studio by itself. What else can I try to get it to compress after copy?
/// <binding BeforeBuild='default' />
/*
This file is the main entry point for defining Gulp tasks and using Gulp plugins.
Click here to learn more. https://go.microsoft.com/fwlink/?LinkId=518007
*/
var gulp = require("gulp");
var debug = require("gulp-debug");
var del = require("del");
var uglify = require("gulp-uglify");
var pump = require("pump");
var runSequence = require("run-sequence");
// Root folders: bower packages (input) and the Lib folder (output).
var paths = {
bower: "./bower_components/",
lib: "./Lib/"
};
// Files to copy, grouped per bower package; paths are relative to
// paths.bower until prefixPathToModules() rewrites them in place.
var modules = {
"store-js": ["store-js/dist/store.legacy.js"],
"bootstrap-select": [
"bootstrap-select/dist/css/bootstrap-select.css",
"bootstrap-select/dist/js/bootstrap-select.js",
"bootstrap-select/dist/js/i18n/*.min.js"
]
}
// Default task: clean, then copy and compress.
// NOTE(review): tasks inside the array run in parallel, so "compress"
// can run before "copy" has produced any files — this is the reported
// problem; see the accepted fix below.
gulp.task("default", function (cb) {
runSequence("clean", ["copy", "compress"], cb);
});
// Remove everything under Lib except the folder itself and its ReadMe.
gulp.task("clean",
function () {
return del.sync(["Lib/**", "!Lib", "!Lib/ReadMe.md"]);
});
// Uglify every .js under Lib in place; pump forwards stream errors to cb.
gulp.task("compress",
function (cb) {
pump([
gulp.src(paths.lib + "**/*.js"),
uglify(),
gulp.dest(paths.lib)
], cb);
});
// Copy the configured bower files into Lib.
// NOTE(review): copyModules() starts gulp streams but nothing waits for
// them, so cb() signals completion before the copies have finished.
gulp.task("copy",
function (cb) {
prefixPathToModules();
copyModules();
cb();
});
// Prepend paths.bower to every configured file path.
// NOTE(review): mutates `modules` in place, so running "copy" twice in
// one process would double-prefix the paths.
function prefixPathToModules() {
for (var moduleIndex in modules) {
for (var fileIndex in modules[moduleIndex]) {
modules[moduleIndex][fileIndex] = paths.bower + modules[moduleIndex][fileIndex];
}
}
}
// Start one copy stream per module; the streams are not returned, so
// callers cannot know when they finish (see note on the "copy" task).
function copyModules() {
for (var files in modules) {
gulp.src(modules[files], { base: paths.bower })
.pipe(gulp.dest(paths.lib));
}
}
You use run-sequence and your code
runSequence("clean", ["copy", "compress"], cb);
run in such order
clean
copy and compress in parallel // that's why your code compresses nothing, because you have not copied files yet
cb
Write like this and compress will be after copy
runSequence("clean", "copy", "compress", cb);
I am not familiar with runSequence. But why don't you try the following. By this way your default task depends on compress and compress depends on copy. So, 'copy' will run first and then 'compress'
// Gulp 3.x dependency syntax: the tasks in the array complete before the
// task body runs, so 'compress' waits for 'copy' here.
gulp.task('default', ['copy','compress'], function(cb){});
// NOTE(review): this array-dependency signature was removed in gulp 4 —
// verify the gulp version in use before adopting this approach.
gulp.task('compress',['copy'], function(cb){});
Gulp returns a stream; since you are calling it in a for loop, the stream from the first iteration is the only one whose completion can be observed.
Update your copyModule to the following and you can try either runSequence like posted by Kirill or follow my approach
// Gather every configured file into a single glob list and copy them in
// one gulp stream, returning the stream so the task runner can wait on it.
function copyModules() {
  const allModuleFiles = Object.keys(modules).reduce(function (acc, moduleName) {
    return acc.concat(modules[moduleName]);
  }, []);
  return gulp.src(allModuleFiles, { base: paths.bower })
    .pipe(gulp.dest(paths.lib));
}

Browserify only if lint passes in Gulp

I'm trying to achieve this Gulp stream:
It seems like a fairly straight-forward process, but from what I can tell, it is not possible to implement as a Gulp stream.
I'm currently doing this:
// Bundle with Browserify, then run JSHint on the generated bundle.
gulp.task('js', function () {
return browserify('foo/main.js')
.bundle()
.pipe(source('bundle.js'))
// streamify adapts the buffer-based jshint plugin to the streaming bundle.
// NOTE(review): lint runs on the generated bundle here, so reported line
// numbers refer to bundle.js rather than the original source files —
// exactly the pain point described in the question.
.pipe(streamify(jshint()))
.pipe(jshint.reporter('default'))
// source map, minify, …
});
The problem is that JSHint should run first, only on the changed file, and the process should be aborted if the lint fails. In my setup, Browserify always runs, and only then JSHint runs on the entire bundle. I can deal with the performance penalty, but the JSHint's line numbers correspond to the generated bundle, and not my JS source files, which is a pain.
This is a cool idea. I've implemented this into my pipeline using watchify, which will lint files using the default reporter, and use the fail reporter if the file changed didn't pass the lint test. Even though this is recommended in the question, personally I would avoid doing this as what you really want is just for your reporter to emit lint checks, whilst keeping the development watcher still spawned in the background. Otherwise you have to keep restarting the task, which would generally tend to bug me. Anyway, here's the code:
'use strict';
var assign = require('object-assign'),
gulp = require('gulp'),
gutil = require('gulp-util'),
merge = require('merge-stream'),
jshint = require('gulp-jshint'),
source = require('vinyl-source-stream'),
watchify = require('watchify'),
browserify = require('browserify');
// Logical names for the app's main script, bundle output, and web root.
var resources = {
mainJS : 'main.js',
bundleJS : 'bundle.js',
root : 'www'
};
// Resolve a logical resource name to a relative path.
function res(r) {
return './' + resources[r];
}
// All-in-one watch task: bundle with watchify/browserify, and on each
// change lint only the files that changed.
gulp.task('watch', function() {
// watchify wraps the browserify instance so rebuilds are incremental.
var bundler = watchify(browserify(res('mainJS'), assign(watchify.args, {
fullPaths: false
})));
// changedFiles is supplied by watchify's 'update' event; it is absent on
// the initial build, in which case only the compile stream runs.
var scripts = function(changedFiles) {
var compileStream = bundler
.bundle()
// Log bundle errors instead of letting them kill the watcher.
.on('error', gutil.log.bind(gutil, gutil.colors.red('Browserify Error\n')))
.pipe(source(res('bundleJS')))
.pipe(gulp.dest(res('root')));
if (changedFiles) {
// Lint just the changed files; the 'fail' reporter errors the stream
// when a file does not pass the lint check.
var lintStream = gulp.src(changedFiles)
.pipe(jshint())
.pipe(jshint.reporter('default'))
.pipe(jshint.reporter('fail'));
// Run lint and compile together as a single returned stream.
return merge(lintStream, compileStream);
}
return compileStream;
};
bundler.on('update', scripts);
return scripts();
});
Note that this is based heavily off the official recipe for fast browserify builds with watchify (https://github.com/gulpjs/gulp/blob/master/docs/recipes/fast-browserify-builds-with-watchify.md), and is an 'all-in-one' type task; that is to say that I will generally spawn a single task somewhere off in the background, with minimal logging (I run gulp with the --silent flag), which is personally easier to deal with :-)