I am writing my own gulp plugin which looks like this...
var through2 = require('through2');
var order = require('gulp-order');
module.exports = function() {
    return through2.obj(function(file, encoding, callback) {
        callback(null, transform(file));
    });
};

function transform(file) {
    // I will modify file.contents here - its ok
    return file;
}
and I would like to apply some other gulp plugin to the buffer that came from gulp.src. Is that possible using through2? For example, before calling through2.obj() I would like to apply the gulp-order plugin - how can I do that?
If you want to chain different gulp plugins together, lazypipe is generally a good option:
var through2 = require('through2');
var order = require('gulp-order');
var lazypipe = require('lazypipe');

function yourPlugin() {
    return through2.obj(function(file, encoding, callback) {
        callback(null, transform(file));
    });
}

function transform(file) {
    // I will modify file.contents here - its ok
    return file;
}

function orderPlugin() {
    return order(['someFolder/*.js', 'someOtherFolder/*.js']);
}

module.exports = function() {
    return lazypipe().pipe(orderPlugin).pipe(yourPlugin)();
};
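For reference, a minimal usage sketch in a gulpfile (the file name ./your-plugin.js and the globs are just assumptions):

var gulp = require('gulp');
var yourPlugin = require('./your-plugin');

gulp.task('build', function () {
    // gulp-order reorders the files first, then the through2 transform runs
    return gulp.src(['someFolder/*.js', 'someOtherFolder/*.js'])
        .pipe(yourPlugin())
        .pipe(gulp.dest('dist/'));
});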
I tried the following code.
var gulp = require("gulp");
var ps = require('child_process').exec;
var watch = require('gulp-watch');
gulp.task('exec_file', function() {
    var command = "/mnt/c/pg/expect/folder_sync";
    ps(command, function (err, stdout, stderr) {
        console.log(stdout);
    });
});

gulp.task("watch", function() {
    var targets = [
        './**'
    ];
    return watch(targets, ['exec_file']);
});
However, the code produces an error.
What should I do?
Inside the watch callback, you have to tell Gulp which task to start.
Replace the following line:
return watch(targets, ['exec_file']);
with this:
watch(targets, function() {
    gulp.start('exec_file');
});
or with this:
watch(targets).on('change', function() { gulp.start('exec_file'); });
P.S. I'm not sure if you have to return anything.
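Put together, the watch task could look roughly like this (a sketch, assuming gulp 3.x where gulp.start is still available):

gulp.task("watch", function() {
    var targets = [
        './**'
    ];
    // trigger the exec_file task whenever something under targets changes
    watch(targets, function() {
        gulp.start('exec_file');
    });
});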
At first I thought this was related to task dependencies, so I went with run-sequence and even tried defining dependencies within the tasks themselves. But I cannot get the compress task to run after copy. Even when it says the compress task finished, the compression only works if I run compress by itself in the Visual Studio task runner. What else can I try to get it to compress after copy?
/// <binding BeforeBuild='default' />
/*
This file is the main entry point for defining Gulp tasks and using Gulp plugins.
Click here to learn more. https://go.microsoft.com/fwlink/?LinkId=518007
*/
var gulp = require("gulp");
var debug = require("gulp-debug");
var del = require("del");
var uglify = require("gulp-uglify");
var pump = require("pump");
var runSequence = require("run-sequence");
var paths = {
    bower: "./bower_components/",
    lib: "./Lib/"
};

var modules = {
    "store-js": ["store-js/dist/store.legacy.js"],
    "bootstrap-select": [
        "bootstrap-select/dist/css/bootstrap-select.css",
        "bootstrap-select/dist/js/bootstrap-select.js",
        "bootstrap-select/dist/js/i18n/*.min.js"
    ]
};

gulp.task("default", function (cb) {
    runSequence("clean", ["copy", "compress"], cb);
});

gulp.task("clean", function () {
    return del.sync(["Lib/**", "!Lib", "!Lib/ReadMe.md"]);
});

gulp.task("compress", function (cb) {
    pump([
        gulp.src(paths.lib + "**/*.js"),
        uglify(),
        gulp.dest(paths.lib)
    ], cb);
});

gulp.task("copy", function (cb) {
    prefixPathToModules();
    copyModules();
    cb();
});

function prefixPathToModules() {
    for (var moduleIndex in modules) {
        for (var fileIndex in modules[moduleIndex]) {
            modules[moduleIndex][fileIndex] = paths.bower + modules[moduleIndex][fileIndex];
        }
    }
}

function copyModules() {
    for (var files in modules) {
        gulp.src(modules[files], { base: paths.bower })
            .pipe(gulp.dest(paths.lib));
    }
}
You use run-sequence, and your code
runSequence("clean", ["copy", "compress"], cb);
runs in this order:
1. clean
2. copy and compress in parallel // that's why your code compresses nothing - the files have not been copied yet
3. cb
Write it like this and compress will run after copy:
runSequence("clean", "copy", "compress", cb);
I am not familiar with runSequence, but why don't you try the following? This way your default task depends on compress, and compress depends on copy. So 'copy' will run first and then 'compress'.
gulp.task('default', ['copy', 'compress'], function (cb) { /* default task body */ });
gulp.task('compress', ['copy'], function (cb) { /* original compress body */ });
gulp.src() returns a stream; since you are calling it in a for loop, only the stream from the first iteration would end up being returned.
Update your copyModules to the following, and then you can either try runSequence as posted by Kirill or follow my approach:
function copyModules() {
    var inputFileArr = [];
    for (var files in modules) {
        inputFileArr = inputFileArr.concat(modules[files]);
    }
    return gulp.src(inputFileArr, { base: paths.bower })
        .pipe(gulp.dest(paths.lib));
}
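With copyModules now returning a stream, the copy task can return it as well so gulp (and run-sequence) knows when copying has actually finished. A sketch, dropping the cb in favor of the returned stream:

gulp.task("copy", function () {
    prefixPathToModules();
    // returning the stream signals task completion once the copy is done
    return copyModules();
});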
I want to load only one JS file that uses jQuery code, but I'm confused about how best to do it. The thing I'm worried about is doing something sloppy like the code below to solve the issue of loading all the scripts inside $(document).ready(function(){});
gulp.task('compile-js', function() {
    gulp.src(['./js/initialization.js', './stuff.js', './js/end.js'])
        .pipe(concat('script.js'))
        .pipe(gulp.dest('./public/javascripts/'));
});
where initialization.js and end.js are for the wrapping of the document.ready function (I know lol, hence asking)
Is there a better way of doing it?
Write a gulp plugin file; let's call it 'jquery-noconflict.js':
var through = require('through2');
var gutil = require('gulp-util');

module.exports = function () {
    var stream = through.obj(function (file, enc, cb) {
        if (file.isStream()) {
            this.emit('error', new gutil.PluginError('jquery-noconflict', 'Streams are not supported!'));
            return cb();
        }
        if (file.isBuffer()) {
            // wrap the file contents in a jQuery(document).ready(...) block
            file.contents = Buffer.concat([
                new Buffer('jQuery(document).ready(function(){'),
                file.contents,
                new Buffer('})')
            ]);
        }
        cb(null, file);
    });
    return stream;
};
You might need to 'npm install through2' and 'npm install gulp-util' first.
Now, in your gulpfile.js:
var gulp = require('gulp');
var concat = require('gulp-concat');
var jquery = require('./jquery-noconflict');
gulp.task('compile-js', function () {
    return gulp.src('./stuff.js')
        .pipe(concat('script.js'))
        .pipe(jquery())
        .pipe(gulp.dest('./public/javascripts/'));
});
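As an alternative to writing a custom plugin, the gulp-wrap plugin can do the same wrapping with a lodash template. A sketch, assuming gulp-wrap is installed; treat the template string as an assumption rather than a drop-in:

var wrap = require('gulp-wrap');

gulp.task('compile-js', function () {
    return gulp.src('./stuff.js')
        .pipe(concat('script.js'))
        // <%= contents %> is replaced with the concatenated file contents
        .pipe(wrap('jQuery(document).ready(function(){\n<%= contents %>\n});'))
        .pipe(gulp.dest('./public/javascripts/'));
});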
I'm a bit stumped with gulp. Based on the docs, in order to get sequential execution I should be returning the stream from my tasks, so I tried to do that in the gulpfile below. But as best I can tell, there's a race condition: half the time I get ENOENT/lstat errors, and the other half it succeeds but my deployDir has weird folder names and missing files all over. Am I missing something? Is there a trick to this?
var gulp = require('gulp'),
    filter = require('gulp-filter'),
    mainBowerFiles = require('main-bower-files'),
    del = require('del'),
    inject = require("gulp-inject"),
    uglify = require('gulp-uglifyjs');

var config = {
    bowerDir: 'src/main/html/bower_components',
    cssDir: 'src/main/html/css/lib',
    fontsDir: 'src/main/html/fonts/lib',
    imgDir: 'src/main/html/img/lib',
    jsDir: 'src/main/html/js/lib',
    deployDir: 'src/main/resources/html'
};

gulp.task('default', ['clean', 'bowerdeps', 'dev']);

gulp.task('clean', function() {
    return del([
        config.cssDir,
        config.fontsDir,
        config.jsDir,
        config.deployDir
    ]);
});

gulp.task('dev', function() {
    return gulp
        .src(['src/main/html/**', '!src/main/html/{bower_components,bower_components/**}'])
        .pipe(gulp.dest(config.deployDir));
});

gulp.task('bowerdeps', function() {
    var mainFiles = mainBowerFiles();
    if (!mainFiles.length) return; // No files found

    var jsFilter = filterByRegex('.js$');
    var cssFilter = filterByRegex('.css$');
    var fontFilter = filterByRegex('.eot$|.svg$|.ttf$|.woff$');

    return gulp
        .src(mainFiles)
        .pipe(jsFilter)
        .pipe(gulp.dest(config.jsDir))
        .pipe(jsFilter.restore())
        .pipe(cssFilter)
        .pipe(gulp.dest(config.cssDir))
        .pipe(cssFilter.restore())
        .pipe(fontFilter)
        .pipe(gulp.dest(config.fontsDir));
});

// Utility Functions
var filterByRegex = function(regex) {
    return filter(function(file) {
        return file.path.match(new RegExp(regex));
    });
};
Dependencies always run in parallel: ['clean', 'bowerdeps', 'dev'].
https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-tasks-in-series.md
You can use run-sequence for sequencing tasks.
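For example, the default task could be rewritten like this (a sketch, assuming run-sequence is installed):

var runSequence = require('run-sequence');

gulp.task('default', function (cb) {
    // run clean first, then bowerdeps and dev in parallel
    runSequence('clean', ['bowerdeps', 'dev'], cb);
});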
Another thing: del doesn't return a stream. Use the callback instead:
gulp.task('clean', function(cb) {
    del([
        config.cssDir,
        config.fontsDir,
        config.jsDir,
        config.deployDir
    ], cb);
});
I'm a fan of the files object format:
files: {
    'dest/a.js': ['src/aa.js', 'src/aaa.js'], // key: value
    'dest/a1.js': ['src/aa1.js', 'src/aaa1.js']
}
I have a gulp task that concats source files like this:
gulp.task('cat', function() {
    gulp.src( <value-goes-here> )
        .
        <many pipeline steps>
        .
        .pipe(concat(<key-goes-here>))
        .pipe(gulp.dest('target/'))
        .
        <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
        .
});
Is there a streaming way to extend this task so that I get one bundle file for each key-value pair in files?
I would like NOT to create one task per key-value pair, as I would like to continue piping more steps even after the last .pipe(gulp.dest('target/')).
If I'm approaching this problem in the wrong way, is there a better approach?
Thank you in advance!
Rob Rich's answer works. Here's a working version:
var Q = require('q');
var gulp = require('gulp');
var concat = require('gulp-concat');

var files = {
    'a.js': ['src/aa.js', 'src/aaa.js'],
    'a1.js': ['src/aa1.js', 'src/aaa1.js']
};

gulp.task('cat', function() {
    var promises = Object.keys(files).map(function (key) {
        var deferred = Q.defer();
        var val = files[key];
        console.log(val);
        gulp.src(val)
            .pipe(concat(key))
            .pipe(gulp.dest('dest/'))
            .on('end', function () {
                deferred.resolve();
            });
        return deferred.promise;
    });
    return Q.all(promises);
});
Try this:
var Q = require('q');

gulp.task('cat', function() {
    var promises = Object.keys(files).map(function (key) {
        var deferred = Q.defer();
        var val = files[key];
        gulp.src(val)
            .
            <many pipeline steps>
            .
            .pipe(concat(key))
            .pipe(gulp.dest('target/'))
            .
            <more pipeline steps to be run on 'dest/a.js' and 'dest/a1.js'>
            .
            .on('end', function () {
                deferred.resolve();
            });
        return deferred.promise;
    });
    return Q.all(promises);
});
You can also accomplish a similar result using streams instead of promises, for example with the combined-stream or stream-combiner packages.
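As a rough sketch of the stream-based approach (note: this uses the merge-stream package rather than the two mentioned above, so treat it as an assumption, not a drop-in; files is the object defined earlier):

var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');

gulp.task('cat', function () {
    var streams = Object.keys(files).map(function (key) {
        // one concat pipeline per destination bundle
        return gulp.src(files[key])
            .pipe(concat(key))
            .pipe(gulp.dest('dest/'));
    });
    // the merged stream ends once every inner stream has ended
    return merge.apply(null, streams);
});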