I have some application/configurations variables, such as MY_ENDPOINT, that should point to http://dev.myendpoint/ for dev and http://myendpoint/ for production. Where should I store this information in a ReactJS application that uses gulpjs (and flux)?
If you're using browserify, you can use the envify transform to provide compile time variables.
var gulp = require('gulp');
var gulpBrowserify = require('gulp-browserify');

var envParams = {};

function bundle() {
    return gulp.src('src/app.js')
        .pipe(gulpBrowserify({
            transform: [
                'reactify',
                ['envify', envParams]
            ]
        }))
        .pipe(gulp.dest('dist'));
}

gulp.task('scripts-dev', function() {
    envParams = {MY_ENDPOINT: 'http://dev.myendpoint/'};
    return bundle();
});

gulp.task('scripts-prod', function() {
    envParams = {MY_ENDPOINT: 'http://myendpoint/'};
    return bundle();
});
And in your code:
fetch(process.env.MY_ENDPOINT + 'api/foo')...
You can also supply env vars on the command line; however, the gulpfile will override them by default:
MY_ENDPOINT=something gulp scripts-dev
And to allow command line to take precedence:
envParams = {MY_ENDPOINT: process.env.MY_ENDPOINT || 'http://myendpoint/'};
// or with es6 shim
envParams = Object.assign({MY_ENDPOINT: 'http://myendpoint/'}, process.env);
You can also use envify before other transforms which require static strings, like brfs:
var config = JSON.parse(fs.readFileSync(process.env.CONFIG_PATH));
Which will compile into:
var config = JSON.parse(fs.readFileSync('/home/.../dev-config.json'));
And finally into:
var config = JSON.parse('{"foo": "bar"}');
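In the gulp setup above, this just means listing envify ahead of brfs in the transform array. Here is a rough sketch reusing the bundle() function from earlier (the bundleWithConfig name is illustrative, and it assumes envParams also carries CONFIG_PATH):
// A sketch only: envify must run before brfs so that process.env.CONFIG_PATH
// is already a static string by the time brfs tries to inline readFileSync().
function bundleWithConfig() {
    return gulp.src('src/app.js')
        .pipe(gulpBrowserify({
            transform: [
                'reactify',
                ['envify', envParams], // replaces process.env.CONFIG_PATH first
                'brfs'                 // then inlines the now-static readFileSync() call
            ]
        }))
        .pipe(gulp.dest('dist'));
}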
Related
How can I replace a string in a particular source file while the source files are being concatenated?
var gulp = require('gulp'),
    rename = require('gulp-rename'),
    concat = require('gulp-concat'),
    uglify = require('gulp-uglify'),
    replace = require('gulp-replace');

var config = {
    cssConcatFiles: [
        'one.css',
        'two.css',
        'three.css'
    ]
};

gulp.task('css-concat', function() {
    return gulp.src(config.cssConcatFiles)
        .pipe(replace('url\(\'', 'url\(\'../images/fancybox/'))
        // I want to perform this replace to a particular file which is "two.css"
        .pipe(concat('temp.css'))
        .pipe(uglyfycss())
        .pipe(rename('temp.min.css'))
        .on('error', errorLog)
        .pipe(gulp.dest('public/css/plugins/fancybox'));
});
This should work, using gulp-if:
const gulpIF = require('gulp-if');

gulp.task('css-concat', function() {
    return gulp.src(config.cssConcatFiles)
        //.pipe(replace('url\(\'', 'url\(\'../images/fancybox/'))
        // I want to perform this replace to a particular file which is "two.css"
        .pipe(gulpIF((file) => file.path.match('two.css'), replace('url\(\'', 'url\(\'../images/fancybox/')))
        .pipe(concat('temp.css'))
        .pipe(uglyfycss())
        .pipe(rename('temp.min.css'))
        .on('error', errorLog)
        .pipe(gulp.dest('public/css/plugins/fancybox'));
});
You could use the following pipe instead of the gulp-if call:
.pipe(replace(/url\(\'/g, function(match) {
    if (this.file.relative === "two.css") {
        return 'url\(\'../images/fancybox/';
    }
    return match;
}))
since gulp-replace will take a function as an argument and provide that function with a vinyl file reference (this.file) which you can use to test for which file is passing through the stream. You must, however, return something from the function call even when you want to do nothing - so return the original match.
I recommend using gulp-if, much cleaner in your case.
A lot of gulp setup examples use the CommonJS pattern. Gulp tasks are defined as follows:
myGulpTask.js
const gulp = require('gulp');

const paths = {
    src: './src',
    dest: './dest'
};

const myGulpTask = function() {
    return gulp.src(paths.src)
        .pipe() // do stuff
        .pipe(gulp.dest(paths.dest));
};

module.exports = myGulpTask;

gulp.task('my-gulp-task', myGulpTask);
This allows you to run the gulp task with:
$ npm run gulp myGulpTask
But since the task is directly assigned, would it make sense to define the export as follows:
//...
const myGulpTask = module.exports = function() {
    return gulp.src(paths.src)
        .pipe() // do stuff
        .pipe(gulp.dest(paths.dest));
};

gulp.task('my-gulp-task', myGulpTask);
//...
Maybe it's sweating the small stuff, or is there a difference in these two module declarations?
There is no difference between the two; the first one is just friendlier and easier to read.
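A quick way to see why: an assignment expression evaluates to the value being assigned, so in the combined form the local constant and module.exports end up referencing the exact same function. A tiny illustration (not from the question itself):
const myGulpTask = module.exports = function() {};
console.log(myGulpTask === module.exports); // true - gulp.task() registers the same function either way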
Is it possible to get the list of files coming from a gulp.src stream as an array, e.g.:
var files = convertToArray(gulp.src('**/*.js'));
Update:
I was trying to move away from the gulp-karma plugin:
gulp.task('test', function () {
    return gulp.src(files)
        .pipe($.order(ordering))
        .pipe($.karma({
            configFile: 'karma.conf.js'
        }));
});
So my idea was:
gulp.task('test', function (done) {
    var karmaFiles = convertToArray(gulp.src(files)
        .pipe($.order(ordering)));
    new Server({
        configFile: 'karma.conf.js',
        files: karmaFiles
    }, done).start();
});
But as pointed out, this won't work because of it being async. Here's my solution:
gulp.task('test', function (done) {
    gulp.src(files)
        .pipe($.order(ordering))
        .pipe(gutil.buffer())
        .on('data', function(data) {
            var karmaFiles = data.map(function(f) { return f.path; });
            new Server({
                configFile: __dirname + '/karma.conf.js',
                files: karmaFiles
            }, done).start();
        });
});
Gulp streams are always asynchronous so your hypothetical convertToArray function (which takes a stream and returns an array) is impossible.
The only way to get all the files in a stream is through some kind of callback function. The gulp-util package, which bundles various helper functions, provides the nice gutil.buffer():
var gutil = require('gulp-util');

gulp.src('**/*.js').pipe(gutil.buffer(function(err, files) {
    console.log('Path of first file:');
    console.log(files[0].path);
    console.log('Contents of first file:');
    console.log(files[0].contents.toString());
}));
In the above, files will be an array of vinyl files. That means for each file you have access to both the contents and the path of the file.
If you don't care about the file contents and only want the path of each file you shouldn't be using gulp.src() at all. You should be using glob instead (which is what gulp is using internally). It gives you a synchronous method that returns an array of matching file paths:
var glob = require('glob');
var files = glob.sync('**/*.js');
console.log(files);
I'm currently getting into browserify. I like it so far but before I start using it I want to automate it. Gulp is the build system of my choice.
So what I actually want to do is:
Get js/app/**.js, bundle it to js/bundle/ and extract common dependencies into js/bundle/common.js. In addition uglify everything and add source maps.
Well, the gulp support for browserify seems kind of poor; at least my Google searches were pretty disappointing.
Anyway. What I've got so far.
var gulp = require('gulp'),
    browserify = require('browserify'),
    factor = require('factor-bundle'),
    source = require('vinyl-source-stream');

// ...
// gulp task
return browserify({
        entries: ['js/app/page1.js', 'js/app/page2.js'],
        debug: true
    })
    .plugin(factor, {
        o: ['js/bundle/page1.js', 'js/bundle/page2.js']
    })
    .bundle()
    .pipe(source('common.js'))
    .pipe(gulp.dest('js/bundle/'));
Well, this is neither uglifying nor adding source maps, much less using a glob pattern. I found an official recipe which shows how to use the pipe to add additional transformations like uglify, but it only covers a single file.
As the outputs parameter to factor-bundle, use streams instead of file paths. You can then do whatever you want with the streams.
var indexStream = source("index.js");
var testStream = source("tests.js");

var commonStream = bundler.plugin('factor-bundle', { outputs: [indexStream, testStream] })
    .bundle()
    .pipe(source('common.js'));

return merge(indexStream, commonStream, testStream)
    .pipe(buffer())
    .pipe(sourcemaps.init({ debug: true, loadMaps: true }))
    .pipe(uglify())
    .pipe(gulp.dest('js/bundle/'));
Thanks to Liero's answer, I got something very similar working. Here's the complete gulpfile:
const gulp = require('gulp');
const browserify = require('browserify');
const factor = require('factor-bundle');
const source = require('vinyl-source-stream');
const sourcemaps = require('gulp-sourcemaps');
const buffer = require('gulp-buffer');
const merge = require('gulp-merge');

gulp.task('bfb', function () {
    const fejs = 'public/javascripts/'; // location of source JS
    const fejsb = fejs + 'b/';          // location of bundles
    const modules = [                   // aka entry points
        'accounts',
        'invoice',
        'invoices',
        // etc...
    ];

    const inputs = [];
    const streams = [];
    modules.forEach(function (module) {
        inputs.push(fejs + module + '.js');
        streams.push(source(module + '.js'));
    });

    const bundler = browserify(inputs, {});
    const commonStream = bundler.plugin(factor, { outputs: streams })
        .bundle()
        .pipe(source('common.js'));

    streams.push(commonStream);

    return merge(streams)
        .pipe(buffer())
        .pipe(sourcemaps.init({loadMaps: true}))
        //.pipe(uglify()) // haven't tested this bit
        .pipe(sourcemaps.write('./'))
        .pipe(gulp.dest(fejsb));
});
I am trying to migrate our build process from an existing custom bash build script to gulp. We concatenate several unminified open-source JS files such as bootstrap, lazyload, etc., along with our own JS files. We uglify each JS file (removing its license as well) in a specific order, prepend custom license text to some of them as required, and concatenate everything into the output JS file. The custom license text is currently kept as strings in the bash script.
How to achieve this in gulp without creating intermediate files?
Will it also be possible to selectively avoid uglifying some JS scripts?
OK, I spent some time learning gulp and its plugins, and here is a working version. The key points are using foreach on each JS group from the JSON config, pushing the streams into an array, and finally merging the array of streams.
Here are the plugins used and the JSON structure defined:
var gulp = require('gulp');
var each = require('foreach');
var debug = require('gulp-debug');
var gulpif = require('gulp-if');
var jshint = require('gulp-jshint');
var uglify = require('gulp-uglify');
var concat = require('gulp-concat-util');
var es = require('event-stream');
var cache = require('gulp-cached');
var remember = require('gulp-remember');

// Structure that holds the various JS files and their handling
var Config = {
    js: {
        output_dir: 'path/to/output/file/',
        output_file: 'outputfile.js',
        src: [{
            name: 'bootstrap',
            src: ['path/to/bootstrap.js'],
            run_lint: false,
            run_uglify: true,
            license: '/* bootstrap license */'
        }, {
            name: 'lazyload',
            src: ['path/to/lazyload.js'],
            run_lint: false,
            run_uglify: true,
            license: '/* lazyload license */'
        }, {
            name: 'inhouse-js',
            src: ['path/to/inhouse/ih-1.js', 'path/to/inhouse/ot/*.js'],
            run_lint: true,
            run_uglify: true,
            license: ''
        }]
    }
};
The build task, with caching as we will be using it in development also:
gulp.task('build', ['build:js']);

gulp.task('build:js', function() {
    var streams = [];

    each(Config.js.src, function(val, key, array) {
        var stream = gulp.src(val.src)
            .pipe(cache('scripts'))
            .pipe(gulpif(val.run_lint, jshint('.jshintrc')))
            .pipe(gulpif(val.run_lint, jshint.reporter('jshint-stylish')))
            .pipe(gulpif(val.run_uglify, uglify({
                compress: {
                    drop_console: true
                }
            })))
            .pipe(concat.header(val.license + '\n'));
        streams.push(stream);
    });

    return es.merge.apply(this, streams)
        .pipe(remember('scripts')) // add back all files to the stream
        .pipe(concat(Config.js.output_file))
        .pipe(gulp.dest(Config.js.output_dir));
});
If you would like to debug, a good option is to insert the debug plugin around the 'gulp-remember' call, like this:
.pipe(debug({title: 'before remember:'}))
.pipe(remember('scripts')) // add back all files to the stream
.pipe(debug({title: 'after remember:'}))
And here's the watch task:
gulp.task('watch', function() {
    var watch_list = [];
    each(Config.js.src, function(val, key, array) {
        watch_list.push.apply(watch_list, val.src);
    });

    // Watch .js files
    var watcher = gulp.watch(watch_list, ['build']);
    watcher.on('change', function(event) {
        console.log('File ' + event.path + ' was ' + event.type + ', running tasks..');
        if (event.type === 'deleted') { // if a file is deleted, forget it
            delete cache.caches['scripts'][event.path];
            remember.forget('scripts', event.path);
        }
    });
});
You can use lazypipe() to reuse parts of the build:js pipeline in a normal build as well.
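For example, a minimal sketch of that idea (the build:js:plain task name and the glob are hypothetical; it assumes the same plugins and Config object as above):
var lazypipe = require('lazypipe');

// shared lint + uglify steps, reusable across several tasks
var jsTransform = lazypipe()
    .pipe(jshint, '.jshintrc')
    .pipe(jshint.reporter, 'jshint-stylish')
    .pipe(uglify, { compress: { drop_console: true } });

gulp.task('build:js:plain', function() {
    return gulp.src('path/to/inhouse/**/*.js') // hypothetical glob
        .pipe(jsTransform())                   // reuse the shared steps
        .pipe(concat(Config.js.output_file))
        .pipe(gulp.dest(Config.js.output_dir));
});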