I rewrote an example I found here (Watch a folder for changes using node.js, and print file paths when they are changed) using chokidar.
I want to know when a folder is added to my watched folder (I'm only interested in folders, not files).
I rewrote the example to:
var chokidar = require('chokidar');
var watcher = chokidar.watch('.');
watcher
    .on('add', function(path) { console.log('File', path, 'has been added'); })
    .on('change', function(path) { console.log('File', path, 'has been changed'); })
    .on('unlink', function(path) { console.log('File', path, 'has been removed'); })
    .on('error', function(error) { console.error('Error happened', error); });
with no effect: when I add a new folder I get no message.
Operating System: Kubuntu 20.04
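The handlers above only listen for file events; chokidar reports directories through separate events, so a sketch along these lines (assuming chokidar's addDir/unlinkDir events and the ignoreInitial option) should fire when a folder is created:

var chokidar = require('chokidar');

var watcher = chokidar.watch('.', { ignoreInitial: true });
watcher
    .on('addDir', function(path) { console.log('Folder', path, 'has been added'); })
    .on('unlinkDir', function(path) { console.log('Folder', path, 'has been removed'); })
    .on('error', function(error) { console.error('Error happened', error); });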
I'm trying to build an Octopus Deploy package for an angular-cli project using Gulp and Gulp-Octo:
const gulp = require("gulp"),
    octopus = require("@octopusdeploy/gulp-octo"),
    version = require("./package.json").version;

gulp.task("octopack",
    ["build-prod"],
    () => gulp.src("dist/*")
        .pipe(octopus.pack(
            "zip", // octopackjs does not support nupkg format yet
            {
                id: "myprojectid",
                version: `${version}.${commandLineOptions.buildnumber}`
            }))
        .pipe(gulp.dest('./octopus'))
);
This creates a package with the correct contents and version number, but it always goes into the current directory (alongside gulpfile.js) instead of the directory that I specified in gulp.dest().
I have tried all of the following variations in the call to gulp.dest, with the same result:
./octopus
./octopus/
octopus/
octopus
path.join(__dirname, 'octopus')
Am I misunderstanding how gulp.dest() works, or is octopus.pack() doing something weird?
(Note: If I leave out the gulp.dest() altogether then no zip file is created.)
It's a bug in gulp-octo. In this line they set the path of the generated archive. Unfortunately they just use the filename of the archive instead of a full path (which is what they're supposed to do), so the file is always written relative to the current working directory.
I might send them a pull request when I get the chance, since this is an easy fix.
In the meantime you can use the following workaround:
var path = require('path');

gulp.task("default",
    () => gulp.src("dist/*")
        .pipe(octopus.pack(
            "zip", // octopackjs does not support nupkg format yet
            {
                id: "myprojectid",
                version: `${version}.${commandLineOptions.buildnumber}`
            }))
        // restore an absolute path so gulp.dest resolves it against file.base
        .on('data', (f) => { f.path = path.join(f.base, f.path); })
        .pipe(gulp.dest('./octopus'))
);
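To see why that one-liner is enough: gulp.dest writes each file to the destination joined with file.relative, which Vinyl derives from file.base and file.path. A small illustrative snippet (the paths here are made up for the example):

var path = require('path');

// Hypothetical values: gulp-octo sets file.path to just the archive name.
var base = '/project/dist';           // what file.base would normally be
var archiveName = 'myprojectid.zip';  // what gulp-octo puts in file.path

// Joining them back (as the 'data' handler does) gives an absolute path,
// and file.relative comes out as the bare archive name again:
var fixed = path.join(base, archiveName);
console.log(fixed);                      // /project/dist/myprojectid.zip
console.log(path.relative(base, fixed)); // myprojectid.zip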
I have this file structure:
Root
    Folder1
        Subfolder1.1
            Subfolder1.1.1
                file_1.1.1.1.js
                file_1.1.1.2.js
            file_1.1.1.js
            file_1.1.2.js
        Subfolder1.2
    Folder2
    Folder3
I'm trying to write a gulp task that will take the root directory (in this case, the Root folder) and generate the following structure:
Root
    Folder1
        Subfolder1.1
            Subfolder1.1.1.min.js
            file_1.1.1.min.js
            file_1.1.2.min.js
        Subfolder1.2
    Folder2
    Folder3
As you can see, the files that are directly in the second level, e.g. Subfolder1.1, are just minified. All the files that go deeper than two levels will be concatenated and named after the second level folder containing them.
Is this possible to accomplish in gulp, and if it is, can anybody give me a clue on how to do it?
Maybe this can help you run some tasks per folder.
Or you may find some help in building such a thing yourself by iterating over the directories.
Okay, I was able to do it, here is the code:
var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var uglify = require('gulp-uglify');
var gulp_concat = require('gulp-concat');

gulp.task('task', function() {
    // The input root dir
    var root = 'root_in';
    // The output root dir
    var rootOut = 'root_out';
    // first get all the folders in the root directory
    var folders = fs.readdirSync(root)
        .filter(function(file) {
            return fs.statSync(path.join(root, file)).isDirectory();
        });
    return folders.map(function(folder) {
        // get the files inside each folder
        var files = fs.readdirSync(path.join(root, folder));
        files.map(function(file) {
            // in case it is a directory, concat all the files in it
            if (fs.statSync(path.join(root, folder, file)).isDirectory()) {
                return gulp.src(path.join(root, folder, file, '/**/*.js'))
                    .pipe(uglify())
                    .pipe(gulp_concat(file + '.js'))
                    .pipe(gulp.dest(path.join(rootOut, folder)));
            }
            // if it is a regular file, just uglify it and output it
            else {
                return gulp.src(path.join(root, folder, file))
                    .pipe(uglify())
                    .pipe(gulp.dest(path.join(rootOut, folder)));
            }
        });
    });
});
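One caveat with the code above: the streams created inside map() are never combined, so gulp has no reliable way to know when the task has finished. A rough sketch of the same task with the streams collected through the merge-stream package (the task name is my own, and I'm assuming merge-stream is installed):

var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var uglify = require('gulp-uglify');
var gulp_concat = require('gulp-concat');
var merge = require('merge-stream');

gulp.task('task-merged', function() {
    var root = 'root_in';
    var rootOut = 'root_out';
    var merged = merge();

    fs.readdirSync(root)
        .filter(function(name) {
            return fs.statSync(path.join(root, name)).isDirectory();
        })
        .forEach(function(folder) {
            fs.readdirSync(path.join(root, folder)).forEach(function(file) {
                var full = path.join(root, folder, file);
                var stream;
                if (fs.statSync(full).isDirectory()) {
                    // deeper than the second level: concat everything into one file
                    stream = gulp.src(path.join(full, '/**/*.js'))
                        .pipe(uglify())
                        .pipe(gulp_concat(file + '.js'));
                } else {
                    // a plain file at the second level: just uglify it
                    stream = gulp.src(full).pipe(uglify());
                }
                // register the stream so gulp waits for it to finish
                merged.add(stream.pipe(gulp.dest(path.join(rootOut, folder))));
            });
        });

    // the task is only considered done when every merged stream has ended
    return merged;
});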
I'm using Gulp to build a zip file that is then uploaded to AWS Lambda. Uploading the zip file is done manually; only the compression step is handled by Gulp.
Here is my gulpfile.js
var gulp = require('gulp');
var zip = require('gulp-zip');
var del = require('del');
var install = require('gulp-install');
var runSequence = require('run-sequence');
var awsLambda = require("node-aws-lambda");
gulp.task('clean', function() {
return del(['./dist', './dist.zip']);
});
gulp.task('js', function() {
return gulp.src('index.js')
.pipe(gulp.dest('dist/'));
});
gulp.task('npm', function() {
return gulp.src('./package.json')
.pipe(gulp.dest('dist/'))
.pipe(install({production: true}));
});
gulp.task('zip', function() {
return gulp.src(['dist/**/*', '!dist/package.json'])
.pipe(zip('dist.zip'))
.pipe(gulp.dest('./'));
});
gulp.task('deploy', function(callback) {
return runSequence(
['clean'],
['js', 'npm'],
['zip'],
callback
);
});
After running the deploy task, a zip file named dist.zip is created, consisting of an index.js file and a node_modules folder. The node_modules folder contains only the lodash library.
This is index.js
var _ = require('lodash');
console.log('Loading function');
exports.handler = (event, context, callback) => {
//console.log('Received event:', JSON.stringify(event, null, 2));
var b = _.chunk(['a', 'b', 'c', 'd', 'e'], 3);
console.log(b);
callback(null, event.key1); // Echo back the first key value
//callback('Something went wrong');
};
After using the AWS Lambda console to upload dist.zip, there is an error showing that the lodash library cannot be found:
{
"errorMessage": "Cannot find module 'lodash'",
"errorType": "Error",
"stackTrace": [
"Function.Module._load (module.js:276:25)",
"Module.require (module.js:353:17)",
"require (internal/module.js:12:17)",
"Object.<anonymous> (/var/task/index.js:1:71)",
"Module._compile (module.js:409:26)",
"Object.Module._extensions..js (module.js:416:10)",
"Module.load (module.js:343:32)",
"Function.Module._load (module.js:300:12)",
"Module.require (module.js:353:17)"
]
}
But in the zip folder, there is a node_modules directory that contains the lodash lib.
dist.zip
|--- node_modules
|    |--- lodash
|--- index.js
When I zip the node_modules directory and the index.js file manually, it works fine.
Does anyone have an idea what's wrong? Maybe when compressing with Gulp there is a misconfiguration of the lib path?
I had the same problem a few days back.
Everyone pointed to gulp-zip; however, it was not a problem with gulp-zip.
The below worked fine:
gulp
.src(['sourceDir/**'], {nodir: true, dot: true} )
.pipe(zip('target.zip'))
.pipe(gulp.dest('build/'));
That is, note the second parameter to src in the snippet above:
{nodir: true, dot: true}
That is, we have to include dot files in the zip (e.g. .config, .abc, etc.).
So include the above in gulp's .src, otherwise other operations like copy, zip, etc. will not work properly.
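Applied to the gulpfile in the question, the zip task would look roughly like this (a sketch that keeps the asker's dist layout and package.json exclusion):

var gulp = require('gulp');
var zip = require('gulp-zip');

gulp.task('zip', function() {
    return gulp.src(['dist/**/*', '!dist/package.json'], {nodir: true, dot: true})
        .pipe(zip('dist.zip'))
        .pipe(gulp.dest('./'));
});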
The package gulp-zip is massively popular (4.3k downloads per day) and there does not seem to be any Gulp substitute. The problem is definitely with relative paths and how gulp-zip processes them. Even when using a base path option in the gulp.src function (example below), gulp-zip finds a way to mess it up.
gulp.task("default", ["build-pre-zip"], function () {
return gulp.src([
"dist/**/*"
], { base: "dist/" })
.pipe(debug())
.pipe(zip("dist.zip"))
.pipe(gulp.dest("./dist/"));
});
Since there is no good Gulp solution as of 1/4/2017, I suggest a workaround. I use Gulp to populate the dist folder first, exactly how I need it, with the proper node_modules folder. Then it is time to zip the dist folder properly, with relative file paths stored. To do that and also update Lambda, I use a Windows batch file of command-line calls to get the job done. Here is the upload.bat file I created to take the place of the gulp-zip task:
start /wait cmd /c "gulp default"
start /wait cmd /c "C:\Program Files\WinRAR\WinRAR.exe" a -r -ep1 dist\dist.zip dist\*.*
aws lambda update-function-code --zip-file fileb://dist/dist.zip --function-name your-fn-name-here
If you use WinRAR you will find their command line docs here; for WinZip go here. That .bat file assumes you are using the AWS Command Line Interface (CLI) which is a godsend; get it here.
If you are wishing this answer pointed you towards a 100% Gulp solution, to that I say, "You and me both!". Good luck.
I override the main file list for Bootstrap in bower.json:
"main" : [
"./dist/css/bootstrap.css",
"./dist/css/bootstrap.css.map",
"./dist/css/bootstrap-theme.css",
"./dist/css/bootstrap-theme.css.map",
"./dist/js/bootstrap.js",
"./dist/fonts/*",
"./less/**"
]
And I want the files to be copied along with their css, js, and fonts folders. I.e., can I set '/dist/' as the base folder?
Or can I do it in the gulp task? In gulpfile.js I wrote:
var files = mainBowerFiles('**/dist/**');
return gulp.src( files, {base: 'C:/Users/Den/Desktop/HTML5-Demo/bower_components/bootstrap/dist/'} )
.pipe( gulp.dest('public_html/libs') );
But I'm forced to write a full path, which of course is bad. Is there a way to use a relative path?
Also, I want to ask: what does the './' at the beginning of the directories mean?
To use a relative path you need to get the current working directory. (The leading './' simply means 'relative to the current working directory'.)
var path = require('path');
var cwd = process.cwd(); // current working directory
var basePath = path.resolve(cwd, "bower_components/bootstrap/dist");
The following code works:
var stream = gulp.src(files, {base: './bower_components/bootstrap/dist'})
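Putting the two together, a minimal sketch (the task name is just for illustration, assuming main-bower-files is installed and bower_components sits next to the gulpfile):

var gulp = require('gulp');
var path = require('path');
var mainBowerFiles = require('main-bower-files');

gulp.task('copy-bootstrap-dist', function() {
    // only pick up Bootstrap's dist files, as in the question
    var files = mainBowerFiles('**/dist/**');
    // resolve the base folder against the current working directory,
    // so no absolute Windows path has to be hard-coded
    var basePath = path.resolve(process.cwd(), 'bower_components/bootstrap/dist');
    return gulp.src(files, { base: basePath })
        .pipe(gulp.dest('public_html/libs'));
});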
I'm working on site files outside of my local htdocs that I want to deploy to the local site. I will use gulp-watch to watch for changes, but I'm stumbling on just the rsync setup. File structure:
sitename/
    htdocs/sites/all/themes/themename
    source/themes/themename
And here's the gulpfile:
var gulp = require('gulp');
var rsync = require('gulp-rsync');
gulp.task('deploy', function() {
gulp.src('source/**')
.pipe(rsync({
root: 'source',
destination: '/htdocs/sites/all'
}));
});
And when I run gulp deploy
[11:36:53] Using gulpfile ~/Sites/sitename/gulpfile.js
[11:36:53] Starting 'deploy'...
[11:36:53] Finished 'deploy' after 5.23 ms
Nothing is written anywhere. Am I missing something obvious...
rsync is short for "remote sync", and thus needs a destination somewhere other than your local system. If you want to synchronize stuff from one directory to the other, just use the built-in gulp methods.
gulp.task('deploy', function() {
    return gulp.src('source/**')
        .pipe(gulp.dest('/htdocs/sites/all'));
});
This will copy everything you need to the other folder. If you are looking to also delete files, add this to your watcher:
var del = require('del');
var path = require('path');

var watcher = gulp.watch('./source/**/*', ['deploy']);
watcher.on('change', function(ev) {
    if (ev.type === 'deleted') {
        // path.relative gives us a string where we can easily switch
        // directories
        del(path.relative('./', ev.path).replace('source', '/htdocs/sites/all'));
    }
});
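If you later do need to push to a remote machine, the gulp-rsync call itself would look roughly like this (a sketch: the hostname and destination path are placeholders, and I'm assuming gulp-rsync's hostname/destination options for the remote side):

gulp.task('deploy-remote', function() {
    return gulp.src('source/**')
        .pipe(rsync({
            root: 'source',
            hostname: 'example.com',            // placeholder remote host
            destination: '/var/www/sites/all'   // placeholder remote path
        }));
});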