git status can display all changed files in the current folder.
But the filepath is required with isomorphic-git's status command.
How can I get the list of the changed files with isomorphic-git?
Thank you.
There is statusMatrix command for enumerating statuses of many/all files.
An example of enumerating all modified/deleted/added files:
// Column indices into each statusMatrix row: [filepath, HEAD, WORKDIR, STAGE].
const FILE = 0;
const HEAD = 1;
const WORKDIR = 2;
const STAGE = 3;
// Human-readable status keyed by the "HEAD WORKDIR STAGE" digit triple.
const statusMapping = {
  "003": "added, staged, deleted unstaged",
  "020": "new, untracked",
  "022": "added, staged",
  "023": "added, staged, with unstaged changes",
  "100": "deleted, staged",
  "101": "deleted, unstaged",
  "103": "modified, staged, deleted unstaged",
  "111": "unmodified",
  "121": "modified, unstaged",
  "122": "modified, staged",
  "123": "modified, staged, with unstaged changes"
};
// Keep only the rows that are NOT identical across HEAD, workdir and index
// (i.e. drop unmodified files).
let statusMatrix = (await git.statusMatrix({ fs, dir })).filter(
  (row) => !(row[HEAD] === row[WORKDIR] && row[HEAD] === row[STAGE])
);
// Render each changed row as "<status description>: <filepath>".
let allUncommitedChanges = statusMatrix.map((row) => {
  const key = row.slice(1).join("");
  return `${statusMapping[key]}: ${row[FILE]}`;
});
Related
I have a script (thankyou Cooper) that I'm trying to use to generate a list of all folders and files from within one particular folder in a shared drive, however, the script is returning the following error:
Exception: Argument too large: value
gff # Folder Listing.gs:67
(anonymous) # Folder Listing.gs:72
gff # Folder Listing.gs:68
(anonymous) # Folder Listing.gs:72
gff # Folder Listing.gs:68
(anonymous) # Folder Listing.gs:72
gff # Folder Listing.gs:68
(anonymous) # Folder Listing.gs:72
gff # Folder Listing.gs:68
(anonymous) # Folder Listing.gs:72
I'm assuming the problem is the script is grabbing all folders and files within the shared drive file and it's far too large to run the script, so I'm trying to reduce the source size by targeting a particular folder (or just returning the folder names and not worry about files at all).
What can I change in this script to reduce the size issue?
// Builds a folder/file tree for every shared drive and writes each tree to its
// own sheet of the target spreadsheet.
// NOTE(review): state is shared with gff() through the script cache (key
// "tree") and the global `level` depth counter; CacheService values are capped
// at 100KB, which is the reported cause of the "Argument too large" error on
// large drives (see the discussion below).
function sharedDriveTrees() {
const ss = SpreadsheetApp.openById("blah");//need to change the ssid for the output spreadsheet
const r = Drive.Drives.list();
const drives = JSON.parse(r).items;
// Use at most one existing sheet per drive; the trailing .filter(e => e) drops empty slots.
const shts = ss.getSheets().filter((sh, i) => i < drives.length).filter(e => e);
var trees = [];
drives.forEach((obj, i) => {
// Drives.list() returns `name`; the rest of the script expects `title`.
obj["title"] = obj.name;
let ob =JSON.parse(Drive.Files.get(obj.id,{ supportsAllDrives: true, supportsTeamDrives: true }));
obj["alternateLink"] = ob.alternateLink;
Logger.log('Drive Title: %s Time: %s', obj.title, Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "HH:mm:ss"));
shts[i].setName(`${obj.title}\n${Utilities.formatDate(new Date(),ss.getSpreadsheetTimeZone(),"yyyy.MM.dd HH:mm:ss")}`);
let tree = [];
// Seed the cache; gff() reads and rewrites the tree through CacheService (60s TTL).
CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
level = 1;
gff(obj)
tree = JSON.parse(CacheService.getScriptCache().get("tree"));
// Longest row length, used to pad all rows into a rectangular range.
let l = tree.reduce((a, c) => {
if (c.length > a) {
a = c.length;
}
return a;
}, 0);
// Pad every short row with empty strings up to the widest row.
tree.forEach((a, j) => {
if ((l - a.length) > 0) {
let arr = [...Array.from(new Array(l - a.length).keys(), x => "")];
tree[j] = a.concat(arr);
}
});
trees.push(tree);
const sh = shts[i];
sh.clearContents();
sh.getRange(1, 1, tree.length, tree[0].length).setValues(tree);
SpreadsheetApp.flush();
});
}
level = 1;
// Recursively appends one spreadsheet row per folder/file under `fobj` to the
// cached tree. Each row has (level-1) leading blanks followed by a HYPERLINK
// formula, so deeper items land in columns further right.
// NOTE(review): the tree is round-tripped between recursion levels through
// CacheService (key "tree", 60s TTL); cached values are capped at 100KB, so
// large folder structures overflow here. `level` is a global depth counter
// shared with the caller.
function gff(fobj) {
//Logger.log('Drive Title: %s', fobj.title);
const r = Drive.Children.list(fobj.id);
const fldrMime = "application/vnd.google-apps.folder";
let tree = JSON.parse(CacheService.getScriptCache().get("tree"));
let files = [];
let subfolders = [];
fobj["level"] = level;
let children = JSON.parse(r).items;
// Partition this folder's children into plain files and subfolders by MIME type.
children.forEach((obj, i) => {
let o = JSON.parse(Drive.Files.get(obj.id, { supportsAllDrives: true, supportsTeamDrives: true }));
o["level"] = level;
if (o.mimeType == fldrMime) {
subfolders.push(o);
} else {
files.push(o);
}
});
//Logger.log('level: %s', level);
// Row for the current folder: depth padding, then a hyperlink to the folder.
let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return `=HYPERLINK("${fobj.alternateLink}","${fobj.title}")` } })];
//Logger.log('arr1: %s', JSON.stringify(arr1));
tree.push(arr1)
if (files && files.length > 0) {
// One row per file, indented one level deeper than the folder row.
files.forEach(obj => {
let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return `=HYPERLINK("${obj.alternateLink}","${obj.title}")` } })];
//Logger.log('arr2: %s', JSON.stringify(arr2));
tree.push(arr2)
})
}
CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
// Depth-first recursion; each child call re-reads and re-writes the cache.
subfolders.forEach(obj => {
level++;
obj.level = level;
CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60);
gff(obj);
tree = JSON.parse(CacheService.getScriptCache().get("tree"))
});
// Pairs with the level++ done before each recursive call into this function.
level--;
return;
}
Edit:
After checking the affected line I found out that the issue is happening at CacheService.getScriptCache().put("tree", JSON.stringify(tree), 60). The Cache Documentation explains that the limit for the second parameter value is 100KB. The original script creator was using the CacheService as kind of a global variable to save every iteration of the loop as it kept adding levels to the tree. Since your folder structure is pretty large it grew beyond the 100KB limit.
As far as I can tell there's no way to raise this limit so I rewrote a few lines to pass the object to the gff() function instead of using the cache.
I kept the hyperlinks, but I also added a couple commented lines that you can switch to just return folder names as you requested. You can find them at let arr1 = and let arr2 =. You can switch between them to see if there's a performance improvement. You could also just change those lines in the original code that uses the cache, but you may still eventually run into the limit:
// Builds the folder/file tree for a single folder and writes it to the first
// sheet of the target spreadsheet. Cache-free variant: the tree is threaded
// through gff() as an argument and returned, instead of using CacheService.
function folderTrees() {
  const ss = SpreadsheetApp.openById("<Your spreadsheet id>");//need to change the ssid for the output spreadsheet
  const f = Drive.Files.get("<Folder ID>", { supportsAllDrives: true, supportsTeamDrives: true });
  const obj = JSON.parse(f);
  const sh = ss.getSheets()[0];
  var trees = [];
  Logger.log('Folder Title: %s Time: %s', obj.title, Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "HH:mm:ss"));
  sh.setName(`${obj.title}\n${Utilities.formatDate(new Date(), ss.getSpreadsheetTimeZone(), "yyyy.MM.dd HH:mm:ss")}`);
  let tree = [];
  level = 1; // global depth counter consumed by gff()
  tree = gff(obj, tree);
  // Find the widest row, then pad every shorter row with empty strings so the
  // result forms a rectangular range.
  let widest = 0;
  for (const row of tree) {
    if (row.length > widest) {
      widest = row.length;
    }
  }
  tree.forEach((row, idx) => {
    const missing = widest - row.length;
    if (missing > 0) {
      tree[idx] = row.concat(new Array(missing).fill(""));
    }
  });
  trees.push(tree);
  sh.clearContents();
  sh.getRange(1, 1, tree.length, tree[0].length).setValues(tree);
  SpreadsheetApp.flush();
}
level = 1;
// Recursively collects one spreadsheet row per folder/file under `fobj`.
// Cache-free rewrite: the accumulated rows (`treeobj`) are passed in and the
// grown tree is returned, avoiding CacheService's 100KB value limit.
// `level` is a global depth counter: rows get (level-1) leading blanks so
// deeper entries land in columns further right.
function gff(fobj, treeobj) {
const r = Drive.Children.list(fobj.id);
const fldrMime = "application/vnd.google-apps.folder";
let tree = treeobj;
let files = [];
let subfolders = [];
fobj["level"] = level;
let children = JSON.parse(r).items;
// Partition this folder's children into plain files and subfolders by MIME type.
children.forEach((obj, i) => {
let o = JSON.parse(Drive.Files.get(obj.id, { supportsAllDrives: true, supportsTeamDrives: true }));
o["level"] = level;
if (o.mimeType == fldrMime) {
subfolders.push(o);
} else {
files.push(o);
}
});
//first line adds the hyperlinks and the second one returns only text
// Row for the current folder: depth padding, then a HYPERLINK formula (or plain title).
let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return `=HYPERLINK("${fobj.alternateLink}","${fobj.title}")` } })];
//let arr1 = [...Array.from(new Array(level).keys(), x => { if (x < (level - 1)) { return '' } else { return fobj.title } })];
tree.push(arr1)
if (files && files.length > 0) {
// One row per file, one level deeper than its parent folder's row.
files.forEach(obj => {
//first line adds the hyperlinks and the second one returns only text
let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return `=HYPERLINK("${obj.alternateLink}","${obj.title}")` } })];
//let arr2 = [...Array.from(new Array(level + 1).keys(), x => { if (x < (level)) { return '' } else { return obj.title } })];
tree.push(arr2)
})
}
// Depth-first recursion into each subfolder; the returned tree replaces ours.
subfolders.forEach(obj => {
level++;
obj.level = level;
tree = gff(obj, tree);
});
// Pairs with the level++ done before each recursive call into this function.
level--;
return tree;
}
And here's the output:
A few things to note:
You'll need to get the folder ID to plug into the f variable.
The original script looped through all the shared Drives and wrote each one's tree to a different sheet within your spreadsheet. Since you only seemed to want a single folder's tree now it just writes to the first sheet in the file.
I'm trying to create a listing of all my google drive folders and I have the following script and a link to the question where I got the script from. I'm just not sure how to implement it. I put the Drive ID in line 6 and then ran it and got this error; Can anyone tell me where I'm going wrong?
List every file and folder of a shared drive in a spreadsheet with Apps Script
11:08:37 AM Error
ReferenceError: gobj is not defined
getFoldersInASharedFolder # Folder Listing.gs:12
This is the script
// Lists every folder of a shared drive, stores the raw responses in a Drive
// file, and shows a Title/Id/Path table in a modeless dialog.
// NOTE(review): as posted this throws "ReferenceError: gobj is not defined"
// at the DriveApp.getFolderById(gobj.globals.testfolderid) line below — gobj
// is never declared (this is exactly the error the question reports).
function getFoldersInASharedFolder() {
let tr = [];
let token = '';
let page = 0;
do {
// NOTE(review): driveId is a "???" placeholder — it must be replaced with the
// real shared-drive id. The option is usually spelled `supportsTeamDrives`
// (plural) — TODO confirm against the Drive advanced-service docs.
let r = Drive.Files.list({ corpora: 'drive', includeItemsFromAllDrives: true, supportsTeamDrive: true, supportsAllDrives: true, driveId: "???", pageToken: token,q: "mimeType = 'application/vnd.google-apps.folder'" });
let obj = JSON.parse(r);
tr.push(obj)
token = obj.nextPageToken
} while (token != null)
let folder = DriveApp.getFolderById(gobj.globals.testfolderid);
folder.createFile(`SharedDriveList ${Utilities.formatDate(new Date(), Session.getScriptTimeZone(), "MM/dd/yyyy HH:mm:ss")}`, JSON.stringify(tr), MimeType.PLAIN_TEXT);
let html = '<style>td,th{border:1px solid black;font-size: 16px;}</style><table><tr><th>Title</th><th>Id</th><th>Path</th></tr>';
tr.forEach((o, i) => {
o.items.forEach(item => {
// NOTE(review): `=` is an assignment, not a comparison — this condition is
// always truthy, so every item is rendered; `==`/`===` was likely intended.
if (item.mimeType = "application/vnd.google-apps.folder") {
html += `<tr><td>${item.title}</td><td>${item.id}</td><td>${getPathAllDrivesFromId(item.id)}</td></tr>`;
}
})
});
html += '</table><input type="button" value="exit" onclick="google.script.host.close()" />';
SpreadsheetApp.getUi().showModelessDialog(HtmlService.createHtmlOutput(html).setHeight(500).setWidth(1200), `Folders in: ${JSON.parse(Drive.Drives.get("driveid")).name}`);
}
The error message occurs because
let folder = DriveApp.getFolderById(gobj.globals.testfolderid);
uses a nested property of gobj as a parameter, but gobj was never declared.
You can fix this error either by properly declaring gobj or by replacing gobj.globals.testfolderid by the folder id (properly set as a string).
The comments explain how to use this function
// Lists every folder of a shared drive, stores the raw API responses in a
// Drive file, and shows a Title/Id/Path table in a modeless dialog.
// Fixes relative to the version posted above:
//  - `sharedrivedid` typo in the last line -> `sharedriveid` (was a ReferenceError)
//  - `item.mimeType = ...` assignment-as-condition -> `===` comparison
//  - removed the unused `page` variable
function getFoldersInASharedFolder() {
  const sharedriveid = "";//add the shared drive id here
  const storagefilefolderid = ""; //folder that stores the raw JSON response; comment out the related lines below if you don't need this file
  let tr = [];
  let token = '';
  do {
    // Page through every (non-trashed or trashed) folder of the shared drive.
    let r = Drive.Files.list({ corpora: 'drive', includeItemsFromAllDrives: true, supportsTeamDrive: true, supportsAllDrives: true, driveId: sharedriveid, pageToken: token, q: "mimeType = 'application/vnd.google-apps.folder'" });
    let obj = JSON.parse(r);
    tr.push(obj);
    token = obj.nextPageToken;
  } while (token != null);
  let folder = DriveApp.getFolderById(storagefilefolderid);//the folder id for the file that stores the results
  folder.createFile(`SharedDriveList ${Utilities.formatDate(new Date(), Session.getScriptTimeZone(), "MM/dd/yyyy HH:mm:ss")}`, JSON.stringify(tr), MimeType.PLAIN_TEXT);
  let html = '<style>td,th{border:1px solid black;font-size: 16px;}</style><table><tr><th>Title</th><th>Id</th><th>Path</th></tr>';
  tr.forEach((o, i) => {
    o.items.forEach(item => {
      if (item.mimeType === "application/vnd.google-apps.folder") {
        html += `<tr><td>${item.title}</td><td>${item.id}</td><td>${getPathAllDrivesFromId(item.id)}</td></tr>`;
      }
    })
  });
  html += '</table><input type="button" value="exit" onclick="google.script.host.close()" />';
  SpreadsheetApp.getUi().showModelessDialog(HtmlService.createHtmlOutput(html).setHeight(500).setWidth(1200), `Folders in: ${JSON.parse(Drive.Drives.get(sharedriveid)).name}`);
}
How do I get the name of the parent folder using gulp-data? Currently I'm using the following:
In my front matter
---
title: 'some title'
---
from my gulp file:
// Gulp pipeline: parse each page's front matter, attach per-file metadata,
// then serialize every file's data object to src/data/new.json.
function fm2json() {
return gulp.src('src/pages/**/*.html')
.pipe(require('gulp-gray-matter')())
.pipe($.data(function(file){
// NOTE(review): as transcribed, the trailing comma after the last assignment
// (immediately before `}`) is a syntax error — these separators should be
// semicolons.
file.data.relative = file.relative,
file.data.basename = file.basename,
}))
.pipe($.pluck('data', 'new.json'))
.pipe($.data(function(file){
// NOTE(review): `new Buffer(...)` is deprecated in Node; `Buffer.from(...)`
// is the modern equivalent.
file.contents = new Buffer(JSON.stringify(file.data))
}))
.pipe(require('gulp-json-format')(2))
.pipe(gulp.dest('src/data'));
}
which outputs the following to new.json
[
  {
    "title": "some title",
    "relative": "lesson01\\file.html",
    "basename": "file.html"
  },
  {
    "title": "some title 2",
    "relative": "lesson02\\file2.html",
    "basename": "file2.html"
  }
]
I can't figure out how to just get the parent folder of the file so that relative would be "relative":"lesson01" and "relative":"lesson02".
It's not the most efficient way to do it. If it helps anyone this is what I ended up with.
// Gulp pipeline: parse each page's front matter, derive the immediate parent
// folder name, file name, and normalized relative path from the vinyl file,
// then serialize every file's data object to src/data/new.json.
// Fixes: comma statement separators (a syntax error immediately before `}`)
// -> semicolons; deprecated `new Buffer(...)` -> `Buffer.from(...)`.
function fm2json() {
  return gulp.src('src/pages/**/*.html')
    .pipe(require('gulp-gray-matter')())
    .pipe($.data(function(file){
      var relpath = file.relative;
      var path = relpath.replace(/\\/g,"/"); // normalize Windows separators
      var split = path.split('/');           // path segments
      var parent = split[split.length - 2];  // immediate parent folder name
      file.data.parent = parent;
      file.data.file = file.basename;
      file.data.path = path;
    }))
    .pipe($.pluck('data', 'new.json'))
    .pipe($.data(function(file){
      file.contents = Buffer.from(JSON.stringify(file.data));
    }))
    .pipe(require('gulp-json-format')(2))
    .pipe(gulp.dest('src/data'));
}
I need to create sub folders in google drive using google drive api added using nuget package in console application.
I can get the folder id of the root folder, can get the children of the root folder, and can also upload a file into the root folder. The only problem is the creation of sub folders inside folders.
// NOTE(review): excerpt — the closing brace of this for-loop lies outside the
// quoted snippet. `array` appears to hold folder titles (index 0 presumably
// the root) — TODO confirm against the surrounding code.
for (int i = 1; i < array.Count(); i++)
{
// Drive v2 metadata for the subfolder we intend to create.
var subfoldername = new Google.Apis.Drive.v2.Data.File { Title = array[i], MimeType = "application/vnd.google-apps.folder" };
ChildrenResource.ListRequest request = service.Children.List(rootfolderid);
ChildList children = request.Execute();
if (children.Items.Count > 0)
{
foreach (ChildReference c in children.Items)
{
// Fetch full metadata for each child so its MIME type can be inspected.
Google.Apis.Drive.v2.Data.File file = service.Files.Get(c.Id).Execute();
if (file.MimeType == "application/vnd.google-apps.folder")
{
List<GoogleDriveFile> googledrive = new List<GoogleDriveFile>();
googledrive.Add(new GoogleDriveFile
{
OriginalFilename = file.OriginalFilename
});
}
}
}
else
{
// here need to add sub folder in folder, but this line adds folder at root
// NOTE(review): `foldername` (not the `subfoldername` built above) is inserted,
// and no Parents collection is set — per Drive v2, a file inserted without
// parents is placed in the root, which matches the reported symptom.
var result = service.Files.Insert(foldername).Execute();
}
Here is the way I do it: when creating a sub folder in Google Drive it must have a parent. So before executing the string q, we need to search for the parent root id.
// Look up the (non-trashed) root folder by title; create it if it is absent.
string findRootId = "mimeType = 'application/vnd.google-apps.folder' and title ='" + RootFolder + "' and trashed = false";
IList<File> _RootId = GoogleDriveHelper.GetFiles(service, findRootId);
if (_RootId.Count == 0) {
_RootId.Add(GoogleDriveHelper.createDirectory(service, RootFolder, "", "root"));
Console.WriteLine("Root folder {0} was created.", RootFolder);
}
var id = _RootId[0].Id;
// Query for the target subfolder *under* that parent ('<id>' in parents),
// so the subsequent create/lookup is scoped to the parent, not the root.
string Q = "mimeType = 'application/vnd.google-apps.folder' and '" + id + "' in parents and title ='" + GoogleDriveFolderName + "' and trashed = false";
You must add the property parents while creating a Folder.
parents[]
Collection of parent folders which contain this file.
Setting this field will put the file in all of the provided folders. On insert, if no folders are provided, the file will be placed in the default root folder.
Sample Code:
// Creates a folder named 'SubFolder' inside the given parent via the Drive v2
// REST API (gapi.client.request).
// Improvement: the request body is built as a single object literal instead of
// `new Object()` followed by property assignments (same payload, idiomatic).
function createSubFolder() {
  var body = {
    title: 'SubFolder',
    // Setting parents[] is what makes this a SUB folder; without it Drive v2
    // places the new folder in the root.
    parents: [{ 'id': '0B5xvxYkWPFpCUjJtZVZiMWNBQlE' }],
    mimeType: "application/vnd.google-apps.folder"
  };
  console.log(body);
  var request = gapi.client.request({
    'path': '/drive/v2/files',
    'method': 'POST',
    'body': JSON.stringify(body)
  });
  request.execute(function(resp) { console.log(resp); });
}
I'm using Drive v2 in JavaScript
Hope this helps
I have written a gulp file that watches over several directories for changes, and then create concatenation to multiple specified destination.
Here is a simplified version of my project structure:
I have 2 site folders:
one/ and two/
Each site have two branch folders:
a/ and b/
Inside each branch, there are three folders:
inner/, outer/ and web/
My task is to grab similar part files from the inner and outer folders, and concatenate them into relative web folders. Below is a simple example of desired output.
-- inner/
|-- color1
|-- color2
|-- fruit1
|-- fruit2
-- outer/
|-- color1
|-- color2
|-- fruit1
|-- fruit2
-- web/
|-- colors.txt
|-- fruits.txt
I have created a config.json file to hold site specific configuration. Currently only using it to customize site paths. Here is the config.json
{
"sites": {
"one": {
"a": "/path/to/one/a/",
"b": "/path/to/one/b/"
},
"two": {
"a": "/path/to/two/a/",
"b": "/path/to/two/b/"
}
}
}
And finally here is the gulpfile.js
// Include local Gulp
var gulp = require("gulp");
// Get data from config.json
var sites = require("./config.json").sites;
// Include Gulp specific plugins
var gConcat = require("gulp-concat");
var gHeader = require("gulp-header");
var gUtil = require("gulp-util");
var gNotify = require("gulp-notify");
// Setup directories
// Sub-directory names appended to each site/branch base path from config.json.
var outer = "outer/";
var inner = "inner/";
var web = "web/";
// Misc
// Warning header prepended to every generated (concatenated) file.
var alertMessage = "# GENERATED FILE - DO NOT MODIFY\n\n";
// 8 total tasks for concatenation
// Every task below follows the same recipe for one site/branch pair: take the
// "color?" (or "fruit?") part files from the branch's outer/ then inner/
// folders, concatenate them, prepend the generated-file warning header, write
// colors.txt / fruits.txt into that branch's web/ folder, and raise a desktop
// notification.
// Concatenate to colors.txt - 4 tasks
// Color task 1: [ Site => one ] [ Branch => a ]
gulp.task("one_a_color", function() {
return gulp.src([sites.one.a + outer + "color?", sites.one.a + inner + "color?"])
.pipe(gConcat("colors.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.one.a + web))
.pipe(gNotify());
});
// Color task 2: [ Site => one ] [ Branch => b ]
gulp.task("one_b_color", function() {
return gulp.src([sites.one.b + outer + "color?", sites.one.b + inner + "color?"])
.pipe(gConcat("colors.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.one.b + web))
.pipe(gNotify());
});
// Color task 3: [ Site => two ] [ Branch => a ]
gulp.task("two_a_color", function() {
return gulp.src([sites.two.a + outer + "color?", sites.two.a + inner + "color?"])
.pipe(gConcat("colors.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.two.a + web))
.pipe(gNotify());
});
// Color task 4: [ Site => two ] [ Branch => b ]
gulp.task("two_b_color", function() {
return gulp.src([sites.two.b + outer + "color?", sites.two.b + inner + "color?"])
.pipe(gConcat("colors.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.two.b + web))
.pipe(gNotify());
});
// Concatenate to fruits.txt - 4 tasks
// Fruit task 1: [ Site => one ] [ Branch => a ]
gulp.task("one_a_fruit", function() {
return gulp.src([sites.one.a + outer + "fruit?", sites.one.a + inner + "fruit?"])
.pipe(gConcat("fruits.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.one.a + web))
.pipe(gNotify());
});
// Fruit task 2: [ Site => one ] [ Branch => b ]
gulp.task("one_b_fruit", function() {
return gulp.src([sites.one.b + outer + "fruit?", sites.one.b + inner + "fruit?"])
.pipe(gConcat("fruits.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.one.b + web))
.pipe(gNotify());
});
// Fruit task 3: [ Site => two ] [ Branch => a ]
gulp.task("two_a_fruit", function() {
return gulp.src([sites.two.a + outer + "fruit?", sites.two.a + inner + "fruit?"])
.pipe(gConcat("fruits.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.two.a + web))
.pipe(gNotify());
});
// Fruit task 4: [ Site => two ] [ Branch => b ]
gulp.task("two_b_fruit", function() {
return gulp.src([sites.two.b + outer + "fruit?", sites.two.b + inner + "fruit?"])
.pipe(gConcat("fruits.txt"))
.pipe(gHeader(alertMessage))
.pipe(gulp.dest(sites.two.b + web))
.pipe(gNotify());
});
// Watch for all events in specified {directories}/{files}, then trigger appropriate task
// 8 total watch jobs
// Each watcher logs "<changed file> => <event type>" and starts the concat
// task for its own site/branch pair.
// BUG FIX: the two "Site => two, Branch => b" watchers previously watched
// sites.one.b (copy-paste error) while triggering the two_b_* tasks; they now
// watch sites.two.b as their labels intend.
gulp.task("watch", function () {
  // Color related watch jobs - Total 4
  // Color watch 1: [ Site => one ] [ Branch => a ]
  gulp.watch([sites.one.a + outer + "**/color?", sites.one.a + inner + "**/color?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("one_a_color");
  });
  // Color watch 2: [ Site => one ] [ Branch => b ]
  gulp.watch([sites.one.b + outer + "**/color?", sites.one.b + inner + "**/color?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("one_b_color");
  });
  // Color watch 3: [ Site => two ] [ Branch => a ]
  gulp.watch([sites.two.a + outer + "**/color?", sites.two.a + inner + "**/color?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("two_a_color");
  });
  // Color watch 4: [ Site => two ] [ Branch => b ]
  gulp.watch([sites.two.b + outer + "**/color?", sites.two.b + inner + "**/color?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("two_b_color");
  });
  // Fruit related watch jobs - Total 4
  // Fruit watch 1: [ Site => one ] [ Branch => a ]
  gulp.watch([sites.one.a + outer + "**/fruit?", sites.one.a + inner + "**/fruit?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("one_a_fruit");
  });
  // Fruit watch 2: [ Site => one ] [ Branch => b ]
  gulp.watch([sites.one.b + outer + "**/fruit?", sites.one.b + inner + "**/fruit?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("one_b_fruit");
  });
  // Fruit watch 3: [ Site => two ] [ Branch => a ]
  gulp.watch([sites.two.a + outer + "**/fruit?", sites.two.a + inner + "**/fruit?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("two_a_fruit");
  });
  // Fruit watch 4: [ Site => two ] [ Branch => b ]
  gulp.watch([sites.two.b + outer + "**/fruit?", sites.two.b + inner + "**/fruit?"], function(event) {
    gUtil.log(event.path.split("/").pop(), "=>", event.type);
    gulp.start("two_b_fruit");
  });
});
// Run all tasks
// Gulp 3 style: run every concat task once, then keep the watchers alive.
gulp.task("background",
[
"one_a_color", "one_b_color", "two_a_color", "two_b_color",
"one_a_fruit", "one_b_fruit", "two_a_fruit", "two_b_fruit",
"watch"
]
);
The above gulp file works and does the job. However, as you can see, most of the codes are repeated, only part that changes are the gulp.src and gulp.dest, along with the task names.
My question is. Would it be possible to simplify this gulp file, so instead of repeating codes for every tasks, maybe similar tasks can be batched together.
Not that easy a task, but let's see if we can optimise that. Gulp and Globs greatly deal with arrays, that's why we have to convert your paths to an array first:
var gulp = require('gulp');
var concat = require('gulp-concat');
var es = require('event-stream');
var sites = require('./config.json').sites;
// Walk a nested config object and collect every leaf (non-object) value into
// a flat array, preserving key-iteration order.
// e.g. { one: { a: "/one/a/" }, two: { a: "/two/a/" } } -> ["/one/a/", "/two/a/"]
var toArray = function(conf) {
  var leaves = [];
  for (var key in conf) {
    var entry = conf[key];
    if (typeof entry !== 'object') {
      leaves.push(entry);
      continue;
    }
    // Nested site/branch object: descend and splice its leaves in.
    leaves = leaves.concat(toArray(entry));
  }
  return leaves;
};
// Flatten the nested { site: { branch: path } } config into a plain array of
// branch base paths (intentionally replaces the `sites` object required above).
var sites = toArray(sites);
Now that we have the paths, we create the globs for fruits and colors.
// Build one color glob and one fruit glob per site/branch base path.
var globs = [];
for (var s = 0; s < sites.length; s++) {
  globs.push(sites[s] + '**/color*', sites[s] + '**/fruit*');
}
With your current config, you get an array of 8 entries. Next, let us define the concat-task. Here is what you mean with "batched" together, we need a so called stream array (I wrote about that here). It's a simple mapping of an existing array to many gulp streams, which are merged at the end via the event-stream module. With the color/fruit thing going on, we need to be a little creative with our concat names and dest names.
Note that I use the changed plugin to prevent useless builds.
// Build one concat stream per glob and merge them all into a single stream
// so gulp treats the whole batch as one task.
gulp.task('concat', function() {
  var streams = globs.map(function(pattern) {
    // 'col'/'fru' picks the output file name; stripping the search suffix from
    // the pattern leaves the site/branch base path, to which 'web' is appended.
    var outName = (pattern.indexOf('color') >= 0 ? 'col' : 'fru') + '.txt';
    var outDir = pattern.replace('**/color*','').replace('**/fruit*','') + 'web';
    return gulp.src(pattern)
      .pipe(concat(outName))
      .pipe(gulp.dest(outDir));
  });
  return es.merge.apply(null, streams);
});
This task now does everything we need, and incrementally so. So our watch process is rather straightforward.
// Build everything once up front ('concat' runs as a task dependency), then
// re-run the full 'concat' task whenever any watched glob changes.
gulp.task('watch', ['concat'], function() {
gulp.watch(globs, ['concat']);
});
Hope this helps!
Update
Alright, I made some adaptations, which should prevent having your whole project rebuilt.
First, I extracted the concatStream to a function. This is actually the one thing you already did with your own sample:
// Concatenate every file matched by `pattern` into a single headed output
// file inside the corresponding site/branch web/ folder, then notify.
var concatStream = function(pattern) {
  var outName;
  if (pattern.indexOf('color') >= 0) {
    outName = 'farbe';
  } else {
    outName = 'frucht';
  }
  // Stripping the search suffix leaves the site/branch base path.
  var outDir = pattern.replace('**/color*','').replace('**/fruit*','') + 'web';
  return gulp.src(pattern)
    .pipe(concat(outName + '.txt'))
    .pipe(header(alertMessage))
    .pipe(notify())
    .pipe(gulp.dest(outDir));
};
Depending on the Glob (the file pattern we select either colors or fruits from our directories), we define a new output (file, is 'col' when 'color' is in our search string, 'fru' otherwise) and a new destination (which is just the old folder without the colors/fruits search pattern).
gulp.task('concat') does now the following:
// Map every glob onto its concat stream and merge them into one task stream.
gulp.task('concat', function() {
  return es.merge.apply(null, globs.map(concatStream));
});
Each of our globs (console.log them, if you want to know what's in there) gets mapped to the concatStream, then the new array of streams gets merged and executed.
The watch task is now new... we do kinda the same as with our 'concat' task:
// Build everything once via the 'concat' dependency, then register one
// watcher per glob so a change only rebuilds the affected site/branch output.
gulp.task('watch', ['concat'], function() {
  // forEach, not map: watchers are registered purely for their side effect
  // and the return values are discarded.
  globs.forEach(function(glob) {
    gulp.watch(glob, function() {
      return concatStream(glob);
    });
  });
});
For each glob, we create a new watcher, which just calls the concatStream again.
Update
Small change
Inside the glob, changing the wildcard (*) to a single-character match (?) allows us to use the same base name for the output file (e.g. color and fruit).
// Build one color glob and one fruit glob per site/branch base path.
// The trailing '?' matches exactly one character (color1, color2, ...), so
// the generated colors.txt/fruits.txt outputs are not re-matched as sources.
var globs = [];
for (var s = 0; s < sites.length; s++) {
  globs.push(sites[s] + '**/color?', sites[s] + '**/fruit?');
}
And this as well...
// Concatenate the part files matched by `pattern` into color.txt / fruit.txt
// inside the matching site/branch web/ folder, prefixed with the warning
// header, then notify.
var concatStream = function(pattern) {
  var outName;
  if (pattern.indexOf('color') >= 0) {
    outName = 'color';
  } else {
    outName = 'fruit';
  }
  // Stripping the search suffix leaves the site/branch base path.
  var outDir = pattern.replace('**/color?','').replace('**/fruit?','') + 'web';
  return gulp.src(pattern)
    .pipe(concat(outName + '.txt'))
    .pipe(header(alertMessage))
    .pipe(notify())
    .pipe(gulp.dest(outDir));
};
Now I can keep the names color and fruit for my output files, without worrying about the glob matching the output file's name and concatenating its existing content back into the file.