I have two folders, both of which contain some HTML template files. I need to minify these files into separate destination folders.
Folder structure:
|src
|--clientTemplates
|----abc.html
|----xyz.html
|--serverTemplates
|----abc.html
|----xyz.html
Required destination folder structure:
|dist
|--client
|----abc.html
|----xyz.html
|--server
|----abc.html
|----xyz.html
Following is my gulpfile, where I have my tasks defined for this:
var gulp = require('gulp');
var htmlmin = require('gulp-htmlmin');
var replace = require('gulp-replace');
var del = require('del');
var minOptions = {
collapseWhitespace: true,
minifyJS: { output: { quote_style: 1 } },
minifyCSS: true
};
gulp.task('clean', function(done) {
del(['dist'], done());
});
gulp.task('minify:serverTemplates', function() {
return gulp
.src('src/serverTemplates/*.html')
.pipe(htmlmin(minOptions))
.pipe(replace('\\', '\\\\'))
.pipe(replace('"', '\\"'))
.pipe(gulp.dest('dist/server'));
});
gulp.task('minify:clientTemplates', function() {
return gulp
.src('src/clientTemplates/*.html')
.pipe(htmlmin(minOptions))
.pipe(gulp.dest('dist/client'));
});
gulp.task(
'default',
gulp.series('clean', 'minify:serverTemplates', 'minify:clientTemplates', function inSeries(done) {
done();
})
);
When I run the gulp command it works fine the first time, but it throws errors on alternate runs.
Running the gulp command the first time: (successful output)
Running the gulp command the second time: (errors)
I can't figure out what exactly is wrong there.
Also, is there a way to run the two minification tasks in parallel once the clean task has finished?
Thanks for the help.
The callback you pass to del is wrong. Just return the promise:
gulp.task('clean', function() {
return del(['dist']);
});
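If you would rather keep the callback signature, one alternative (just a sketch; returning the promise is simpler) is to call done only after del's promise has resolved:
gulp.task('clean', function(done) {
  del(['dist']).then(function() {
    done();
  });
});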
As for running the minification tasks in parallel, use gulp.parallel:
gulp.task(
'default',
gulp.series(
'clean',
gulp.parallel('minify:serverTemplates', 'minify:clientTemplates')
)
);
Problems I'm facing:
1. The browser doesn't reflect live changes in .scss or .js files.
2. I don't understand whether we have to return the stream from gulp.task() or not; I have visited some websites and watched some lectures, some of which used return and some didn't.
3. I cannot understand the flow of execution of the gulpfile (which statement runs first, then which, and so on).
This is my current gulpfile.js:
"use strict";
var gulp = require('gulp');
var sass = require('gulp-sass');
var nodemon = require('gulp-nodemon');
var browserSync = require('browser-sync').create();
var uglify = require('gulp-uglify');
gulp.task('default', ['nodemon'], function(){
gulp.watch("src/sass/*.scss", ['sass']);
gulp.watch("src/js/*.js", ['js']);
gulp.watch("views/*.ejs").on('change',browserSync.reload); //Manual Reloading
})
// Process JS files and return the stream.
gulp.task('js', function () {
return gulp.src('src/js/*.js')
.pipe(uglify())
.pipe(gulp.dest('public/javascripts'));
});
// Compile SASS to CSS.
gulp.task('sass', function(){
// gulp.src('src/sass/*.scss') //without return or with return? why?
return gulp.src('src/sass/*.scss')
.pipe(sass().on('error', sass.logError))
.pipe(gulp.dest('public/stylesheets'))
.pipe(browserSync.stream());
});
// Setup proxy for local server.
gulp.task('browser-sync', ['js','sass'], function() {
browserSync.init(null, {
proxy: "http://localhost:3000",
port: 7000,
});
});
gulp.task('nodemon', ['browser-sync'], function(cb){
var running = false;
return nodemon({script: 'bin/www'}).on('start', function(){
if(!running)
{
running = true;
cb();
}
});
})
You may look at the project structure at https://github.com/DivyanshBatham/GulpWorkflow
Try this:
"use strict";
var gulp = require('gulp');
var sass = require('gulp-sass');
var nodemon = require('gulp-nodemon');
var browserSync = require('browser-sync').create();
var uglify = require('gulp-uglify');
// First, run all your tasks
gulp.task('default', ['nodemon', 'sass', 'js'], function(){
// Then watch for changes
gulp.watch("src/sass/*.scss", ['sass']);
gulp.watch("views/*.ejs").on('change',browserSync.reload); //Manual Reloading
// JS changes need to tell browsersync that they're done
gulp.watch("src/js/*.js", ['js-watch']);
})
// create a task that ensures the `js` task is complete before
// reloading browsers
gulp.task('js-watch', ['js'], function (done) {
browserSync.reload();
done();
});
// Process JS files and return the stream.
gulp.task('js', function () {
return gulp.src('src/js/*.js')
.pipe(uglify())
.pipe(gulp.dest('public/javascripts'));
});
// Compile SASS to CSS.
gulp.task('sass', function(){
return gulp.src('src/sass/*.scss')
.pipe(sass().on('error', sass.logError))
.pipe(gulp.dest('public/stylesheets'))
.pipe(browserSync.stream());
});
// Setup proxy for local server.
gulp.task('browser-sync', ['js','sass'], function() {
browserSync.init(null, {
proxy: "http://localhost:3000",
port: 7000,
});
});
gulp.task('nodemon', ['browser-sync'], function(cb){
var running = false;
return nodemon({script: 'bin/www'}).on('start', function(){
if(!running)
{
running = true;
cb();
}
});
})
Also, you should consider adding the JS file to your index.ejs.
E.g.: <script src='/javascripts/main.js'></script>
More help: https://browsersync.io/docs/gulp
I'll answer what I can.
You don't always have to return the stream from a gulp task, but here you should, because you have tasks like 'browser-sync' that depend on other tasks, ['js','sass'], finishing. The purpose of returning the stream is to signal task completion; it is not the only way to signal completion, but it is an easy one.
You are already doing this in your 'js' and 'sass' tasks with the return statements.
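For reference, these are the usual ways of signalling completion in this version of gulp (a sketch with hypothetical task names; del here would come from the 'del' package):
// 1. Return the stream
gulp.task('styles', function () {
  return gulp.src('src/sass/*.scss')
    .pipe(sass().on('error', sass.logError))
    .pipe(gulp.dest('public/stylesheets'));
});
// 2. Call the callback that gulp passes to the task function
gulp.task('wait-a-bit', function (done) {
  setTimeout(function () {
    done(); // tells gulp the task has finished
  }, 100);
});
// 3. Return a promise
gulp.task('clean', function () {
  return del(['public/stylesheets']);
});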
Your 'js' task needs a .pipe(browserSync.stream()); statement at the end, like your 'sass' task has. Or try .pipe(browserSync.reload({stream: true})); sometimes that variant works better.
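For example, a sketch based on the 'js' task from the question:
gulp.task('js', function () {
  return gulp.src('src/js/*.js')
    .pipe(uglify())
    .pipe(gulp.dest('public/javascripts'))
    .pipe(browserSync.stream()); // notify Browsersync once the minified files are written
});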
You have both browser-sync and nodemon running. That, I believe, is unusual and may cause problems: they do much the same thing, and you do not typically see them running together. I would eliminate nodemon from your file.
Flow of execution:
(a) 'default' calls ['nodemon', 'sass', 'js']: these run in parallel.
(b) 'nodemon' calls 'browser-sync', which must finish setting up before 'nodemon' gets into its own function.
(c) 'browser-sync' depends on ['js','sass'], which run in parallel; both must finish and signal that they have finished (by returning the stream, for example) before 'browser-sync' continues.
(d) After 'js', 'sass', 'browser-sync' and 'nodemon' have completed, your watch statements are set up and begin watching.
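As a minimal illustration of the same pattern (hypothetical task names): the tasks listed in a dependency array start in parallel, and each must signal completion before the dependent task's own function runs:
gulp.task('a', function () {
  return gulp.src('src/**/*').pipe(gulp.dest('tmp')); // signals completion by returning the stream
});
gulp.task('b', function (done) {
  done(); // signals completion via the callback
});
gulp.task('c', ['a', 'b'], function () {
  // runs only after both 'a' and 'b' have finished
});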
I have the following gulp file:
var paths = {
all: ['*.js', '**/*.js', 'index.html'],
js: ['*.js', '**/*.js'],
html: 'index.html',
dist: 'dist'
};
var gulp = require("gulp");
var babel = require("gulp-babel");
gulp.task("default", function () {
gulp.src(paths.html)
.pipe(gulp.dest(paths.dist));
return gulp.src(paths.js)
.pipe(babel())
.pipe(gulp.dest(paths.dist));
});
gulp.task('watch', function(){
gulp.watch(paths.all, ['default']);
});
When I run it, I get this error:
SyntaxError: d:/project_folder/node_modules/gulp-babel/node_modules/gulp-util/node_modules/beeper/index.js: 'return' outside of function (9:1) ...
I read somewhere that I shouldn't compile dependencies. I run plain gulp with no flags, so I don't know whether or not I am compiling them. Gulp also seems slow, because it takes a few seconds to get to the first task. How do I get rid of this error? And am I doing something wrong with dependencies?
Yes, you are currently including all .js files from the current directory, not just your source directories. Your application code (let's assume app.js) will "include" your dependencies using CommonJS requires, such as:
var request = require('request');
In order to actually map the require statements, you would want to use a module loader or bundler such as Browserify or Webpack.
The following gulp task will solve the module errors:
var paths = {
all: ['./src/**/*.js', 'index.html'],
js: ['./src/**/*.js'],
html: 'index.html',
dist: 'dist'
};
var gulp = require('gulp');
var babel = require('gulp-babel');
gulp.task('default', function () {
gulp.src(paths.html)
.pipe(gulp.dest(paths.dist));
return gulp.src(paths.js)
.pipe(babel())
.pipe(gulp.dest(paths.dist));
});
gulp.task('watch', function(){
gulp.watch(paths.all, ['default']);
});
This only includes the .js files in the src folder, rather than **/*.js, which would include every *.js file in every folder, including node_modules and bower_components.
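Alternatively (a sketch; whether it fits depends on your layout), you could keep a broad glob but exclude the dependency folders with negated patterns, which gulp.src supports:
var gulp = require('gulp');
var babel = require('gulp-babel');
gulp.task('default', function () {
  return gulp.src(['**/*.js', '!node_modules/**', '!bower_components/**', '!dist/**'])
    .pipe(babel())
    .pipe(gulp.dest('dist'));
});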
Regarding module loading:
You would probably want to use a bundler task to bundle all your client code instead of just copying it to dist, for example:
gulp.task("webpack", function(callback) {
// run webpack
webpack({
// configuration
}, function(err, stats) {
if(err) throw new gutil.PluginError("webpack", err);
gutil.log("[webpack]", stats.toString({
// output options
}));
callback();
});
});
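The configuration object is project-specific; as a rough sketch (the entry point and output names below are assumptions, not taken from the question), something like this could go in place of the // configuration placeholder:
{
  entry: './src/app.js',
  output: {
    path: __dirname + '/dist', // absolute output directory
    filename: 'bundle.js'
  }
}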
I'm having a strange problem. I'm using gulp to compile a react app and am having it copy index.html to the appropriate web directory. When I first run gulp, all runs as expected, but when the file changes and the watch task is run, gulp copies an empty version of the file to the web directory. Does anyone know why this might be happening? Here is my gulpfile.js:
var gulp = require('gulp');
var browserify = require('browserify');
var babelify = require('babelify');
var source = require('vinyl-source-stream');
var livereload = require('gulp-livereload');
gulp.task('livereload', function() {
console.log('reloading');
livereload();
});
gulp.task('copyindextodist', function() {
gulp.src('app/index.html')
.pipe(gulp.dest('dist'));
});
gulp.task('compilejs', function() {
browserify({
entries: 'app/index.js',
extensions: ['.js'],
debug: true
})
.transform('babelify', {presets: ['es2015', 'react']})
.bundle()
.pipe(source('app.js'))
.pipe(gulp.dest('dist'));
});
gulp.task('publishapp', function() {
gulp.src('dist/*.*')
.pipe(gulp.dest('../public'));
});
gulp.task('copypaste', function() {
gulp.src('app/index.html')
.pipe(gulp.dest('../public'));
});
gulp.task('watch', function() {
livereload.listen();
gulp.watch('app/index.html', ['copyindextodist']);
gulp.watch('dist/index.html', ['publishapp']);
gulp.watch('app/index.js', ['compilejs']);
gulp.watch('dist/app.js', ['publishapp']);
});
gulp.task('default', ['copyindextodist', 'compilejs', 'publishapp', 'watch']);
I had the same problem until I defined the dependencies correctly. You can define which tasks should be completed before the current task starts:
gulp.task('compress', ['copy'], function() {
//.... your job
});
This means that the compress task will wait for the copy task to be finished. If you don't do that, you might end up with empty/truncated files and other strange results.
Just take care that your copy tasks return a stream object.
gulp.task('copy', function() {
// "return" is the important part ;-)
return gulp.src(['filepath/**/*'])
.pipe(gulp.dest('lib/newpath'))
});
If you have multiple copy commands running in your task, this is trickier, but there is a package for that:
var gulp = require('gulp');
var merge = require('merge-stream');
gulp.task('copy', function() {
var allStreams = [
gulp.src(['node_modules/bootstrap/dist/**/*'])
.pipe(gulp.dest('lib/bootstrap')),
gulp.src(['node_modules/jquery/dist/**/*'])
.pipe(gulp.dest('lib/jquery')),
];
return merge.apply(this, allStreams);
});
gulp.task('nextTask', ['copy'], function() {
// this task formerly produced empty files, but now
// has a valid dependency on the copy stream and
// thus has all files available when processed.
});
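Applied to the gulpfile in the question, a sketch might look like this (same task names as in the question; each build task returns its stream, and 'publishapp' declares them as dependencies):
gulp.task('copyindextodist', function() {
  return gulp.src('app/index.html')
    .pipe(gulp.dest('dist'));
});
gulp.task('compilejs', function() {
  return browserify({
    entries: 'app/index.js',
    extensions: ['.js'],
    debug: true
  })
  .transform('babelify', {presets: ['es2015', 'react']})
  .bundle()
  .pipe(source('app.js'))
  .pipe(gulp.dest('dist'));
});
gulp.task('publishapp', ['copyindextodist', 'compilejs'], function() {
  return gulp.src('dist/*.*')
    .pipe(gulp.dest('../public'));
});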
I have the following gulpfile:
gulp.task('browserify', function() {
bundle(false);
});
gulp.task('browserify-watch', function() {
bundle(true);
});
function bundle (performWatch) {
var bify = (performWatch === true
? watchify(browserify(finalBrowserifyOptions))
: browserify(finalBrowserifyOptions));
if (performWatch) {
bify.on('update', function () {
console.log('Updating project files...');
rebundle(bify);
});
}
bify.transform(babelify.configure({
compact: false
}));
function rebundle(bify) {
return bify.bundle()
.on('error', function () {
plugins.util.log('Browserify error');
})
.pipe(source('bundle.js'))
.pipe(buffer())
.pipe(plugins.sourcemaps.init({loadMaps: true}))
.pipe(plugins.sourcemaps.write('./'))
.pipe(gulp.dest(paths.build + assets.js));
}
return rebundle(bify);
}
The trouble is that gulp browserify works just fine. However, gulp browserify-watch detects the changes but the output never gets updated.
What am I doing wrong?
I encountered the same watchify failures in file-change detection (both with gulp and grunt). CLI watchify works as expected though. For example:
watchify ./src/app.js -t babelify --outfile ./build/bundle.js -v
Currently I have switched to browserify-incremental. The perspective is different compared with the watchify approach. Here's the paragraph from their GitHub page which captures it best:
browserify-incremental can detect changes which occurred in between runs, which means it can be used as part of build systems which are invoked on demand, without requiring a long-lived process. Whereas watchify is slow for the first run upon each startup, browserify-incremental is fast every time after the very first.
Here's the "translated" CLI command to make use of browserify-incremental:
browserifyinc ./src/app.js -t babelify --outfile ./build/bundle.js -v
Here's a simple gulpfile.js script for watching and [re]bundling:
var gulp = require('gulp');
var sourcemaps = require('gulp-sourcemaps');
var source = require('vinyl-source-stream');
var buffer = require('vinyl-buffer');
var browserify = require('browserify');
var babel = require('babelify');
var browserifyInc = require('browserify-incremental');
var bundleApp = function() {
var browserifyObj = browserify('./src/app.js', { debug: false, cache: {}, packageCache: {}, fullPaths: true }).transform(babel);
var bundler = browserifyInc(browserifyObj, {cacheFile: './browserify-cache.json'});
bundler.on('time', function (time) {
console.log('-> Done in ' + (time / 1000) + ' s');
});
console.log('-> Bundling...');
bundler.bundle()
.on('error', function(err) { console.error(err); this.emit('end'); })
.pipe(source('bundle.js'))
.pipe(buffer())
.pipe(sourcemaps.init({ loadMaps: true }))
.pipe(sourcemaps.write('./'))
.pipe(gulp.dest('./build'));
};
gulp.task('browserify', function () {
gulp.watch('./src/**/*.js', function () {
return bundleApp();
});
});
gulp.task('default', ['browserify']);
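Note that, as written, the 'browserify' task only bundles after a file changes. If an initial bundle on startup is wanted, one option (a sketch) is to call bundleApp() once before registering the watcher:
gulp.task('browserify', function () {
  bundleApp(); // initial bundle when the task starts
  gulp.watch('./src/**/*.js', function () {
    return bundleApp();
  });
});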
Looks like everything was fine; my IDE, WebStorm, was caching the file. When I looked at the actual file on the box, it was fine.
I'm running a simple script that turns HAML and LESS into PHP and CSS. I'm also using gulp.watch to check up on all the changes I make.
Problem: gulp-less and gulp-haml process the files correctly, but when gulp.watch sees a change in a file, the file ends up empty. gulp.watch is using the exact same task that successfully created the PHP file.
var gulp = require('gulp');
var gutil= require('gulp-util');
var path = require('path');
var haml = require('gulp-haml');
var less = require('gulp-less');
// Images
gulp.task('less', function() {
return gulp.src('wp-content/themes/luento/assets/less/*.less')
.pipe(less({paths: [ path.join(__dirname, 'less', 'includes') ]}))
.pipe(gulp.dest('wp-content/themes/luento/assets/css/'));
});
gulp.task('haml-def', function () {
return gulp.src('wp-content/themes/luento/*.haml')
.pipe(haml({ext: '.php'}))
.pipe(gulp.dest('wp-content/themes/luento/'));
});
gulp.task('haml-templates', function () {
return gulp.src('wp-content/themes/luento/templates/*.haml')
.pipe(haml({ext: '.php'}))
.pipe(gulp.dest('wp-content/themes/luento/templates/'));
});
gulp.task('haml-partials', function () {
return gulp.src('wp-content/themes/luento/partials/*.haml')
.pipe(haml({ext: '.php'}))
.pipe(gulp.dest('wp-content/themes/luento/partials/'));
});
// Watch
gulp.task('watch', function() {
// Watch .scss files
gulp.watch('wp-content/themes/luento/templates/*.haml', ['haml-templates']);
gulp.watch('wp-content/themes/luento/partials/*.haml', ['haml-partials']);
gulp.watch('wp-content/themes/luento/*.haml', ['haml-def']);
gulp.watch('wp-content/themes/luento/assets/less/*.less', ['less'])
});
gulp.task('default', ['less', 'haml-def', 'haml-templates','haml-partials','watch']);