webpack: is there a good hook / custom function to dump out resolved configuration?

I'm somewhat of a newbie with webpack and have been experimenting with easier ways to adjust/merge webpack configurations.
The following code, added to webpack/lib/webpack.js, has been pretty helpful.
This is the standard webpack.js:
function webpack(options, callback) {
  var compiler;
  if (Array.isArray(options)) {
    compiler = new MultiCompiler(options.map(function(options) {
      return webpack(options);
    }));
  } else if (typeof options === "object") {
    new WebpackOptionsDefaulter().process(options);
    compiler = new Compiler();
    compiler.options = options;
    compiler.options = new WebpackOptionsApply().process(options, compiler);
    new NodeEnvironmentPlugin().apply(compiler);
    compiler.applyPlugins("environment");
    compiler.applyPlugins("after-environment");
  } else {
    throw new Error("Invalid argument: options");
  }
  if (callback) {
    if (typeof callback !== "function") throw new Error("Invalid argument: callback");
    if (options.watch === true || (Array.isArray(options) &&
        options.some(function(o) {
          return o.watch;
        }))) {
      var watchOptions = (!Array.isArray(options) ? options : options[0]).watchOptions || {};
      // TODO remove this in next major version
      var watchDelay = (!Array.isArray(options) ? options : options[0]).watchDelay;
      if (watchDelay) {
        console.warn("options.watchDelay is deprecated: Use 'options.watchOptions.aggregateTimeout' instead");
        watchOptions.aggregateTimeout = watchDelay;
      }
      return compiler.watch(watchOptions, callback);
    }
    compiler.run(callback);
  }
This is my code (inserted right before the final return compiler;):
  //customization start
  var fs = require('fs');
  var fnp_dump = 'webpack.dump.json';
  fs.writeFile(fnp_dump, JSON.stringify(options, null, 2), function(err) {
    if (err) {
      return console.log(err);
    }
    console.log("dumping resolved options from webpack.js to: " + fnp_dump);
  });
  //customization end
  return compiler;
}
The basic idea is that it dumps out the final JSON/JS options object after webpack has finished merging the usual webpack.base.js + webpack.development.js. Since at that point it's just a fully-resolved JavaScript object, it doesn't really matter how the individual developers wrote their config.js files.
Now you can diff the options sent to webpack (this example shows the transformation of a webpack 1 configuration to webpack 2):
diff 003/webpack.dump.json 004/webpack.dump.json
< "loader": "style!css!postcss-loader!sass"
---
> "use": [
> {
> "loader": "style-loader"
> },
> {
> "loader": "postcss-loader"
> },
> {
> "loader": "sass-loader"
> }
> ]
However, I am customizing webpack.js directly, so I need to re-apply my patch after every npm update webpack. Is there a better way?

If your webpack.config.js is a function, you can call it yourself to resolve it to an options object.
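For example, a minimal sketch of resolving a function-style config yourself (the env/argv arguments here are placeholders for whatever your build normally passes):

// Sketch: a function-style webpack.config.js just needs to be called
// to produce the resolved options object.
const configFn = require('./webpack.config.js');
const options = configFn({ production: false }, {});
console.log(JSON.stringify(options, null, 2));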
If you have several configs (you mentioned webpack.base.js and webpack.development.js), you can use webpack-merge to combine your options into a single object, and then write it to the file system.
I would recommend adding a dedicated script to package.json for this job, which you can then always run after your webpack job:
...,
"scripts": {
  "dump-options": "node scriptThatMergesConfigsAndWritesToFS.js",
  "webpack-job": "webpack ... && npm run dump-options",
  ...
},
...
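A minimal sketch of what scriptThatMergesConfigsAndWritesToFS.js could look like (my assumption, using the v4-style webpack-merge API and the two config files mentioned above):

// scriptThatMergesConfigsAndWritesToFS.js -- a sketch, not a drop-in file.
// Assumes webpack-merge (v4-style API, where the module itself is the merge
// function) is installed and that both configs export plain option objects.
const fs = require('fs');
const merge = require('webpack-merge');
const base = require('./webpack.base.js');
const development = require('./webpack.development.js');

// Merge the configs the same way the build would, then dump the result.
const resolved = merge(base, development);
fs.writeFileSync('webpack.dump.json', JSON.stringify(resolved, null, 2));
console.log('wrote resolved config to webpack.dump.json');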
UPDATE
After some more research I realized that the resolved options object is stored in the compiler object. The compiler object is passed to plugins, so you can easily write a plugin that writes the config to a file, as I did here (not tested).
I also realized that the plugins themselves cannot be stringified, as they are functions, so be aware that you lose the plugin configuration information.
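If you at least want a visible marker for each dropped function, a JSON.stringify replacer can record its name (a sketch of the idea, not part of the plugin linked above):

// Sketch: keep a readable marker for functions instead of silently dropping them.
function dumpReplacer(key, value) {
  if (typeof value === 'function') {
    return '[Function: ' + (value.name || 'anonymous') + ']';
  }
  return value;
}
// e.g. JSON.stringify(options, dumpReplacer, 2)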

I ended up writing my own plugin (and now notice that wtho wrote one too). It worked for me - note that you need the bit of code that handles circular references:
// WebPackCompilationObserver.js
function WebPackCompilationObserver(options) {
  WebPackCompilationObserver.options = options || {};
}

WebPackCompilationObserver.prototype.apply = function(compiler) {
  compiler.plugin("emit", function(compilation, callback) {
    var fs = require('fs');
    var fnp_dump = WebPackCompilationObserver.options.dump_filename;
    if (!fnp_dump) {
      fnp_dump = "./dump.webpack.options.json";
      console.log("please specify dump_filename path in the WebPackCompilationObserver.options, otherwise using default: " + fnp_dump);
    }
    if (fnp_dump) {
      console.log("dumping compilation.options to: " + fnp_dump);
      var cache = [];
      fs.writeFile(fnp_dump, JSON.stringify(compilation.options, function(key, value) {
        if (typeof value === 'object' && value !== null) {
          if (cache.indexOf(value) !== -1) {
            // Circular reference found, discard key
            return;
          }
          // Store value in our collection
          cache.push(value);
        }
        return value;
      }, 2),
      function(err) {
        if (err) {
          return console.log(err);
        }
      });
      cache = null;
    }
    callback();
  });
};

module.exports = WebPackCompilationObserver;
To use it:
webpack.config.development.js:
....
var WebPackCompilationObserver = require("./WebPackCompilationObserver");
....
config.plugins = config.plugins.concat([
  ....
  , new WebPackCompilationObserver({ dump_filename: '../dumpithere.json' })
])

Related

Merge mixin in vue

I'm working on a Vue/Quasar application.
I have my mixin defined like this in my view.cshtml:
var mixin1 = {
  data: function () {
    return { data1: 0, data2: '' }
  },
  beforeCreate: async function () {
    ...
  },
  methods: {
    addformulaire(url) {
    },
    Kilometrique() { }
  }
}
And I want to merge it with this content from a js file (the point is to centralize the same actions across several cshtml files):
const nomeMixins = {
  data: function () {
    return { loadingcdt: false, lstclt: [], filterclient: [], loadingdoc: false, lstdoc: [], filterdoc: [] }
  },
  computed: {
    libmntpiece(v) { return "toto"; }
  },
  methods: {
    findinfcomplemtX3(cdecltx3, cdedocx3) {
    },
    preremplissagex3: async function (cdecltx3, cdedocx3) {
    }
  }
};
I want to merge these two mixins into one. But when I try Object.assign or var mixin = { ...mixin1, ...nomeMixins };, I end up with only one mixin's content; the data and methods from the other are lost, because both objects share the same top-level keys. I tried a forEach over the keys, but that failed too.
Has anyone managed to merge two mixins / JSON objects with the same top-level keys, in the case where no child property is duplicated?
You can't merge mixins that way. The spread syntax overwrites whole keys (data, computed, methods, etc.), so the final result will not be suitable for your purpose.
Refer to the documentation for adding mixins to your component. Also note that you can easily add multiple mixins to any component, so I don't think combining two mixins into one will be any more useful.
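For reference, a minimal sketch of registering both mixins side by side (names taken from the question):

// Let Vue do the merging: data, computed, and methods from each mixin
// are combined, and on a name clash the component's own options win.
new Vue({
  el: '#demo',
  mixins: [mixin1, nomeMixins]
});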
UPDATE
This is a reply to YannickIngenierie's answer, pointing out mistakes in the article it references.
Global mixins are not declared like this:
// NOT a global mixin; on the contrary, MyMixin is local
// and only available in this one component.
new Vue({
  el: '#demo',
  mixins: [MyMixin]
});
Local mixins are not declared like this:
// NOT a local mixin; on the contrary, this is a global mixin,
// available to all components
const DataLoader = Vue.mixin({....})

Vue.component("article-card", {
  mixins: [DataLoader], // no need for this
  template: "#article-card-template",
  created() {
    this.load("https://jsonplaceholder.typicode.com/posts/1")
  }
});
The point is: refer to the documentation before reading any article written by some random guy, including me, and compare what the author says against what is in the documentation.
After more work and searching... I found this one and understood that I can add the mixin directly in my component (don't laugh, I began with Vue only a few months ago).
My custommiwin.js:
const DataLoader = Vue.mixin({
  data: function () {
    return { loadingcdt: false, lstclt: [], filterclient: [], loadingdoc: false, lstdoc: [], filterdoc: [] }
  },
  methods: {
    filterClt: async function (val, update, abort) {
      if (val.length < 3) { abort(); return; }
      else { // search
        this.loadingcdt = true;
        let res = await axios...
        this.loadingcdt = false;
      }
      update(() => {
        const needle = val.toLowerCase();
        this.filterclient = this.lstclt.filter(v => v.libelle.toLowerCase().indexOf(needle) > -1 || v.id.toLowerCase().indexOf(needle) > -1);
      })
    },
    filterDocument: async function (val, update, abort, cdecltx3) {
      if (!cdecltx3 || val.length < 3) { abort(); return; }
      else { // search
        this.loadingdoc = true;
        let res = await axios({ ... })
        this.loadingdoc = false;
      }
      update(() => {
        const needle = val.toLowerCase();
        this.filterdoc = this.lstdoc.filter(v => v.id.toLowerCase().indexOf(needle) > -1);
      })
    },
  }
});
And in my component.js I add this:
mixins: [DataLoader],
Then I include all the js files in my cshtml file.

Node JS: Make a flat json from a tree json

I was writing a node.js script to combine all the json files in a directory and store the result as a new json file. I got the job done to a great extent, but it has a few flaws.
A.json
[
  {
    "id": "addEmoticon1",
    "description": "Message to greet the user.",
    "defaultMessage": "Hello, {name}!"
  },
  {
    "id": "addPhoto1",
    "description": "How are youu.",
    "defaultMessage": "How are you??"
  }
]
B.json
[
  {
    "id": "close1",
    "description": "Close it.",
    "defaultMessage": "Close!"
  }
]
What I finally need is:
result.json
{
  "addEmoticon1": "Hello, {name}!",
  "addPhoto1": "How are you??",
  "close1": "Close!"
}
I wrote a node.js script:
var fs = require('fs');

function readFiles(dirname, onFileContent, onError) {
  fs.readdir(dirname, function(err, filenames) {
    if (err) {
      onError(err);
      return;
    }
    filenames.forEach(function(filename) {
      fs.readFile(dirname + filename, 'utf-8', function(err, content) {
        if (err) {
          onError(err);
          return;
        }
        onFileContent(filename, content);
      });
    });
  });
}

var data = {};
readFiles('C:/node/test/', function(filename, content) {
  data[filename] = content;
  var lines = content.split('\n');
  lines.forEach(function(line) {
    var parts = line.split('"');
    if (parts[1] == 'id') {
      fs.appendFile('result.json', parts[3] + ': ', function (err) {});
    }
    if (parts[1] == 'defaultMessage') {
      fs.appendFile('result.json', parts[3] + ',\n', function (err) {});
    }
  });
}, function(err) {
  throw err;
});
It extracts the 'id' and 'defaultMessage' but is not able to append correctly.
What I get:
result.json
addEmoticon1: addPhoto1: Hello, {name}!,
close1: How are you??,
Close!,
This output is different every time I run my script.
Aim 1: Surround items in double quotes.
Aim 2: Add curly braces at the top and at the end.
Aim 3: No comma after the last element.
Aim 4: Same output every time I run my script.
I'll start with the finished solution...
There's a big explanation at the end of this answer. Let's try to think big-picture for a little bit first, though.
readdirp('.')
.fmap(filter(match(/\.json$/)))
.fmap(map(readfilep))
.fmap(map(fmap(JSON.parse)))
.fmap(concatp)
.fmap(flatten)
.fmap(reduce(createMap)({}))
.fmap(data=> JSON.stringify(data, null, '\t'))
.fmap(writefilep(resolve(__dirname, 'result.json')))
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Console output
wrote results to /path/to/result.json
result.json (I added a c.json with some data to show that this works with more than 2 files)
{
"addEmoticon1": "Hello, {name}!",
"addPhoto1": "How are you??",
"close1": "Close!",
"somethingelse": "Something!"
}
Implementation
I made Promise-based interfaces for readdir, readFile, and writeFile:
import {readdir, readFile, writeFile} from 'fs';

const readdirp = dir=>
  new Promise((pass,fail)=>
    readdir(dir, (err, filenames) =>
      err ? fail(err) : pass(mapResolve (dir) (filenames))));

const readfilep = path=>
  new Promise((pass,fail)=>
    readFile(path, 'utf8', (err,data)=>
      err ? fail(err) : pass(data)));

const writefilep = path=> data=>
  new Promise((pass,fail)=>
    writeFile(path, data, err=>
      err ? fail(err) : pass(path)));
In order to map functions to our Promises, we needed an fmap utility. Notice how we take care to bubble errors up.
Promise.prototype.fmap = function fmap(f) {
  return new Promise((pass,fail) =>
    this.then(x=> pass(f(x)), fail));
};
And here's the rest of the utilities
const fmap = f=> x=> x.fmap(f);
const mapResolve = dir=> map(x=>resolve(dir,x));
const map = f=> xs=> xs.map(x=> f(x));
const filter = f=> xs=> xs.filter(x=> f(x));
const match = re=> s=> re.test(s);
const concatp = xs=> Promise.all(xs);
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);
Lastly, the one custom function that does your work
const createMap = map=> ({id, defaultMessage})=>
  Object.assign(map, {[id]: defaultMessage});
And here's c.json
[
  {
    "id": "somethingelse",
    "description": "something",
    "defaultMessage": "Something!"
  }
]
"Why so many little functions ?"
Well despite what you may think, you have a pretty big problem. And big problems are solved by combining several small solutions. The most prominent advantage of this code is that each function has a very distinct purpose and it will always produce the same results for the same inputs. This means each function can be used other places in your program. Another advantage is that smaller functions are easier to read, reason with, and debug.
Compare all of this to the other answers given here; @BlazeSahlen's in particular. That's over 60 lines of code that's basically only usable to solve this one particular problem. And it doesn't even filter out non-JSON files. So the next time you need to create a sequence of actions for reading/writing files, you'll have to rewrite most of those 60 lines each time. That creates lots of duplicated code and hard-to-find bugs because of exhausting boilerplate. And all that manual error handling... wow, just kill me now. And they thought callback hell was bad? Ha, they just created yet another circle of hell all on their own.
All the code together...
Functions appear (roughly) in the order they are used
import {readdir, readFile, writeFile} from 'fs';
import {resolve} from 'path';

// logp : Promise<Value> -> Void
const logp = p=> p.then(x=> console.log(x), x=> console.error(x));

// fmap : Promise<a> -> (a->b) -> Promise<b>
Promise.prototype.fmap = function fmap(f) {
  return new Promise((pass,fail) =>
    this.then(x=> pass(f(x)), fail));
};

// fmap : (a->b) -> F<a> -> F<b>
const fmap = f=> x=> x.fmap(f);

// readdirp : String -> Promise<Array<String>>
const readdirp = dir=>
  new Promise((pass,fail)=>
    readdir(dir, (err, filenames) =>
      err ? fail(err) : pass(mapResolve (dir) (filenames))));

// mapResolve : String -> Array<String> -> Array<String>
const mapResolve = dir=> map(x=> resolve(dir,x));

// map : (a->b) -> Array<a> -> Array<b>
const map = f=> xs=> xs.map(x=> f(x));

// filter : (Value -> Boolean) -> Array<Value> -> Array<Value>
const filter = f=> xs=> xs.filter(x=> f(x));

// match : RegExp -> String -> Boolean
const match = re=> s=> re.test(s);

// readfilep : String -> Promise<String>
const readfilep = path=>
  new Promise((pass,fail)=>
    readFile(path, 'utf8', (err,data)=>
      err ? fail(err) : pass(data)));

// concatp : Array<Promise<Value>> -> Promise<Array<Value>>
const concatp = xs=> Promise.all(xs);

// reduce : (b->a->b) -> b -> Array<a> -> b
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);

// flatten : Array<Array<Value>> -> Array<Value>
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);

// writefilep : String -> Value -> Promise<String>
const writefilep = path=> data=>
  new Promise((pass,fail)=>
    writeFile(path, data, err=>
      err ? fail(err) : pass(path)));

// -----------------------------------------------------------------------------

// createMap : Object -> Object -> Object
const createMap = map=> ({id, defaultMessage})=>
  Object.assign(map, {[id]: defaultMessage});
// do it !
readdirp('.')
.fmap(filter(match(/\.json$/)))
.fmap(map(readfilep))
.fmap(map(fmap(JSON.parse)))
.fmap(concatp)
.fmap(flatten)
.fmap(reduce(createMap)({}))
.fmap(data=> JSON.stringify(data, null, '\t'))
.fmap(writefilep(resolve(__dirname, 'result.json')))
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Still having trouble following along?
It's not easy to see how these things work at first. This is a particularly squirrely problem because the data gets nested very quickly. Thankfully that doesn't mean our code has to be a big nested mess just to solve the problem! Notice the code stays nice and flat even when we're dealing with things like a Promise of an Array of Promises of JSON...
// Here we are reading directory '.'
// We will get a Promise<Array<String>>
// Let's say the files are 'a.json', 'b.json', 'c.json', and 'run.js'
// Promise will look like this:
// Promise<['a.json', 'b.json', 'c.json', 'run.js']>
readdirp('.')
// Now we're going to strip out any non-JSON files
// Promise<['a.json', 'b.json', 'c.json']>
.fmap(filter(match(/\.json$/)))
// call `readfilep` on each of the files
// We will get Promise<Array<Promise<JSON>>>
// Don't freak out, it's not that bad!
// Promise<[Promise<JSON>, Promise<JSON>, Promise<JSON>]>
.fmap(map(readfilep))
// for each file's Promise, we want to parse the data as JSON
// JSON.parse returns an object, so the structure will be the same
// except JSON will be an object!
// Promise<[Promise<Object>, Promise<Object>, Promise<Object>]>
.fmap(map(fmap(JSON.parse)))
// Now we can start collapsing some of the structure
// `concatp` will convert Array<Promise<Value>> to Promise<Array<Value>>, which the outer Promise adopts
// We will get
// Promise<[Object, Object, Object]>
// Remember, we have 3 Objects; one for each parsed JSON file
.fmap(concatp)
// Your particular JSON structures are Arrays, which are also Objects
// so that means `concatp` will actually return Promise<[Array, Array, Array]>
// but we'd like to flatten that
// that way each parsed JSON file gets mushed into a single data set
// after flatten, we will have
// Promise<Array<Object>>
.fmap(flatten)
// Here's where it all comes together
// now that we have a single Promise of an Array containing all of your objects ...
// We can simply reduce the array and create the mapping of key:values that you wish
// `createMap` is custom tailored for the mapping you need
// we initialize the `reduce` with an empty object, {}
// after it runs, we will have Promise<Object>
// where Object is your result
.fmap(reduce(createMap)({}))
// It's all downhill from here
// We currently have Promise<Object>
// but before we write that to a file, we need to convert it to JSON
// JSON.stringify(data, null, '\t') will pretty print the JSON using tab to indent
// After this, we will have Promise<JSON>
.fmap(data=> JSON.stringify(data, null, '\t'))
// Now that we have a JSON, we can easily write this to a file
// We'll use `writefilep` to write the result to `result.json` in the current working directory
// I wrote `writefilep` to pass the filename on success
// so when this finishes, we will have
// Promise<Path>
// You could have it return Promise<Void> like writeFile sends void to the callback; up to you.
.fmap(writefilep(resolve(__dirname, 'result.json')))
// the grand finale
// alert the user that everything is done (or if an error occurred)
// Remember `.then` is like a fork in the road:
// the code will go to the left function on success, and the right on failure
// Here, we're using a generic function to say we wrote the file out
// If a failure happens, we write that to console.error
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
All done!
Assuming files is a list of arrays: [a, b, ...]
var res = {};
files.reduce((a, b) => a.concat(b), []).forEach(o => res[o.id] = o.defaultMessage);
But you don't need to get all the files at once.
Just add this code to the onFileContent callback:
JSON.parse(fileContent).forEach(o => res[o.id] = o.defaultMessage);
Also, you should add a final callback to your readFiles.
And in this callback:
fs.writeFile('result.json', JSON.stringify(res));
So, final solution for you:
var fs = require('fs');

function task(dir, it, cb) {
  fs.readdir(dir, (err, names) => {
    if (err) return cb([err]);
    var errors = [], c = names.length;
    names.forEach(name => {
      fs.readFile(dir + name, 'utf-8', (err, data) => {
        if (err) {
          errors.push(err);
        } else {
          try {
            it(JSON.parse(data)); // We got the file data!
          } catch(e) {
            errors.push('Invalid json in ' + name + ': ' + e.message);
          }
        }
        if (!--c) cb(errors); // We are finished
      });
    });
  });
}

var res = {};
task('C:/node/test/', (data) => data.forEach(o => res[o.id] = o.defaultMessage), (errors) => {
  // Some files can be wrong
  errors.forEach(err => console.error(err));
  // But we write the received data anyway
  fs.writeFile('C:/node/test/result.json', JSON.stringify(res), (err) => {
    if (err) console.error(err);
    else console.log('Task finished. See result.json');
  })
});
This should do it once you have your JSON in variables a and b:
var a = [
  {
    "id": "addEmoticon1",
    "description": "Message to greet the user.",
    "defaultMessage": "Hello, {name}!"
  },
  {
    "id": "addPhoto1",
    "description": "How are youu.",
    "defaultMessage": "How are you??"
  }
];

var b = [
  {
    "id": "close1",
    "description": "Close it.",
    "defaultMessage": "Close!"
  }
];

var c = a.concat(b);
var res = {}; // use an object, not an array, since the ids become keys
for (var i = 0; i < c.length; i++) {
  res[c[i].id] = c[i].defaultMessage;
}
console.log(res);
Here's my solution:
function readFiles(dirname, onFileContent, onError) {
  fs.readdir(dirname, function(err, filenames) {
    /**
     * We'll store the parsed JSON data in this array
     * @type {Array}
     */
    var fileContent = [];
    if (err) {
      onError(err);
    } else {
      filenames.forEach(function(filename) {
        // Reading the file (synchronously) and storing the parsed JSON output (parsing from string to JSON object)
        var jsonObject = JSON.parse(fs.readFileSync(dirname + filename, 'utf-8'));
        // Pushing the parsed JSON output into array
        fileContent.push(jsonObject);
      });
      // Calling the callback
      onFileContent(fileContent);
    }
  });
}

readFiles('./files/', function(fileContent) {
  /**
   * We'll store the final output object here
   * @type {Object}
   */
  var output = {};
  // Loop over the JSON objects
  fileContent.forEach(function(each) {
    // Looping within each object
    for (var index in each) {
      // Copying the `id` as key and the `defaultMessage` as value and storing in output object
      output[each[index].id] = each[index].defaultMessage;
    }
  });
  // Writing the file (synchronously) after converting the JSON object back to string
  fs.writeFileSync('result.json', JSON.stringify(output));
}, function(err) {
  throw err;
});
A notable difference is that I've not used the asynchronous readFile and writeFile functions, as they'd needlessly complicate the example. This example is meant to showcase the use of JSON.parse and JSON.stringify to do what the OP wants.
UPDATE:
var fs = require('fs');

function readFiles(dirname, onEachFilename, onComplete) {
  fs.readdir(dirname, function(err, filenames) {
    if (err) {
      throw err;
    } else {
      // Prepending the dirname to each filename
      filenames.forEach(function(each, index, array) {
        array[index] = dirname + each;
      });
      // Calling async.map which accepts these parameters:
      // filenames <-------- array of filenames
      // onEachFilename <--- function which will be applied on each filename
      // onComplete <------- function to call when all elements of the filenames array have been processed
      require('async').map(filenames, onEachFilename, onComplete);
    }
  });
}

readFiles('./files/', function(item, callback) {
  // Read the file asynchronously
  fs.readFile(item, function(err, data) {
    if (err) {
      callback(err);
    } else {
      callback(null, JSON.parse(data));
    }
  });
}, function(err, results) {
  /**
   * We'll store the final output object here
   * @type {Object}
   */
  var output = {};
  if (err) {
    throw err;
  } else {
    // Loop over the JSON objects
    results.forEach(function(each) {
      // Looping within each object
      for (var index in each) {
        // Copying the `id` as key and the `defaultMessage` as value and storing in output object
        output[each[index].id] = each[index].defaultMessage;
      }
    });
    // Writing the file (synchronously) after converting the JSON object back to string
    fs.writeFileSync('result.json', JSON.stringify(output));
  }
});
This is a simple asynchronous implementation of the same, using readFile. For more information, see async.map.

Inserting specific file at index of a stream

Basically, this gulp script finds 'file1,file2,file3' in a folder, builds their dependencies (which are in the same folder) and concatenates it all into one file.
Now, my problem is that I have a custom file which I have to insert after file2. How can I achieve this?
function gatherAMD(stream, file) {
  var moduleId = file.path.match(/file\.(.+)\.js/)[1];
  return stream.pipe(amdOptimize(`file.${moduleId}`, {
    baseUrl: "fileslocation/location",
    exclude: [ 'jquery' ]
  }));
}

gulp.task("js", function() {
  var included = [];
  var files = "file1,file2,file3";
  var stream = null;
  if (files.indexOf(",") == -1) {
    stream = gulp.src('fileslocation/location/file.' + files + '.js', { base: "fileslocation/location" });
  } else {
    stream = gulp.src(`fileslocation/location/file.{${files}}.js`, { base: "fileslocation/location" });
  }
  return stream.pipe(debug())
    .pipe(foreach(gatherAMD))
    .pipe(filter(function(file) {
      if (included.indexOf(file.path) === -1) {
        included.push(file.path);
        return true;
      } else {
        return false;
      }
    }))
    .pipe(concat({ path: 'fileslocation/custom.js', base: 'fileslocation' }))
    .pipe(gulp.dest("fileslocation"));
});
You can probably use map to check the current file in the stream, then add your file based on some logic, for example using gulp-inject to add a file to the stream.
Something like:
return stream.pipe(debug())
  .pipe(foreach(gatherAMD))
  .pipe(filter(function(file) {
    if (included.indexOf(file.path) === -1) {
      included.push(file.path);
      return true;
    } else {
      return false;
    }
  }))
  .pipe(map(function(file, callback) {
    if (file.relative === 'file2.js') {
      // inject file to the stream here
    }
    callback(null, file);
  }))
  .pipe(concat({ path: 'fileslocation/custom.js', base: 'fileslocation' }))
  .pipe(gulp.dest("fileslocation"));
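One concrete way to fill in that injection point (my assumption, using through2 and vinyl directly rather than gulp-inject; the file names are placeholders):

// Sketch: a transform that pushes an extra Vinyl file into the stream
// right after the file whose relative path matches.
var through = require('through2');
var File = require('vinyl');
var fs = require('fs');
var path = require('path');

function insertAfter(matchRelativePath, extraFilePath) {
  return through.obj(function(file, enc, cb) {
    this.push(file); // keep the original file in the stream
    if (file.relative === matchRelativePath) {
      // push the custom file so it lands directly after the match
      this.push(new File({
        path: path.resolve(extraFilePath),
        contents: fs.readFileSync(extraFilePath)
      }));
    }
    cb();
  });
}
// usage, in place of the map(...) step above:
// .pipe(insertAfter('file2.js', 'fileslocation/mycustom.js'))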

nodejs parse mysql row to object

I'm a bit of a newbie with node.js + MySQL + object-oriented programming.
Following the question here, I would like the 'Content' object to use the values returned by a MySQL query. What I'm doing now feels really redundant and possibly stupid, as rows[0] itself is already the object I want to use. Is there a better way of doing this? A different approach, if this one is wrong, would also be appreciated.
(I'm using binary UUID keys that must be hex-stringified again before being sent as the resource response.)
content.js:
function Content() {
  this.id = '';
  this.name = '';
  this.domain = '';
}

Content.prototype.validate = function(path, queryParams) {
  ...
  return true;
};

Content.prototype.whatever = function(apiVersion, params, callback) {
  ...
  return callback(null, newParams);
};
mysql.js:
MySQLDb.SELECT_CONTENT_ID = "SELECT id, name, domain FROM content WHERE id = UNHEX(?)";

MySQLDb.prototype.findContentByID = function(id, callback) {
  this.dbConnection.query(MySQLDb.SELECT_CONTENT_ID, [ id ],
    function(err, rows, fields) {
      var content = new Content();
      if (rows.length > 0) {
        var i = 0;
        for (var key in rows[0]) {
          if (rows[0].hasOwnProperty(key) && content.hasOwnProperty(key)) {
            // BINARY(16) --> HEX string
            if (fields[i].columnType === 254) {
              content[key] = rows[0][key].toString('hex').toUpperCase();
            } else {
              content[key] = rows[0][key];
            }
          } else {
            console.log('Column ' + key + ' out of sync on table "content"');
          }
          i += 1;
        }
      }
      callback(err, content);
    });
};
contentRes.js:
contentRes.GETWhatever = function(req, res) {
  db.findContentByID(req.params.id, function onContent(err, content) {
    if (err || !content.validate(req.path, req.query)) {
      return res.send({});
    }
    content.whatever(req.query.apiVersion, req.query.d,
      function onWhateverdone(err, params) {
        if (err) {
          return res.send({});
        }
        return res.send(params);
      });
  });
};
I think a lot of people would say you are doing it generally the right way, even though it admittedly feels redundant.
It might feel a little cleaner if you refactored your code so that you could call the Content() constructor with an optional object, in this case rows[0]. If you were keeping it clean you wouldn't have access to the fields, so you would take a different approach to the data type conversion: either selecting the HEX representation in the query, or simply having Content() know that it needs to convert the id property.
Keeping it fairly simple (by which I mean ignoring making the constructor a bit more intelligent, as well as any error detection or handling), you would have:
function Content(baseObj) {
  this.id = (baseObj && baseObj.id) ? baseObj.id.toString('hex').toUpperCase() : '';
  this.name = (baseObj && baseObj.name) ? baseObj.name : '';
  this.domain = (baseObj && baseObj.domain) ? baseObj.domain : '';
}
Then you could do something like this:
MySQLDb.prototype.findContentByID = function(id, callback) {
  this.dbConnection.query(MySQLDb.SELECT_CONTENT_ID, [ id ],
    function(err, rows, fields) {
      if (err) return callback(err, null);
      return callback(err, new Content(rows[0]));
    });
};
You 'could' also grab the rows[0] object directly, HEX the UUID more or less in situ, and modify the __proto__ of the object, or under Harmony/ES6 use the Object.setPrototypeOf() method.
MySQLDb.prototype.findContentByID = function(id, callback) {
  this.dbConnection.query(MySQLDb.SELECT_CONTENT_ID, [ id ],
    function(err, rows, fields) {
      if (err) return callback(err, null);
      var content = rows[0];
      content.id = content.id.toString('hex').toUpperCase();
      content.__proto__ = Content.prototype;
      return callback(err, content);
    });
};
Note, I said you 'could' do this. Reasonable people can differ on whether you 'should'. __proto__ is deprecated (although it works just fine in Node from what I have seen). If you take this general approach, I would suggest using Object.setPrototypeOf() and installing a polyfill until you are running on ES6.
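For completeness, a sketch of the same query callback using Object.setPrototypeOf() (my assumption of what that variant would look like):

function(err, rows, fields) {
  if (err) return callback(err, null);
  var content = rows[0];
  content.id = content.id.toString('hex').toUpperCase();
  // same effect as the __proto__ assignment, via the ES6 API
  Object.setPrototypeOf(content, Content.prototype);
  return callback(err, content);
}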
Just trying to give you a couple of other more terse ways to do this, given that I think the redundancy/verbosity of the first version is what you didn't like. Hope it helps.

How to obtain arguments.callee.caller?

I am trying to find out the name of the function that called my Google Apps Script function, by using arguments.callee.caller as in How do you find out the caller function in JavaScript?, but it seems there's no such property exported. (However, arguments.callee exists.)
How can I get that calling function's name in Google Apps Script?
As a secondary question, why isn't arguments.callee.caller there?
I made this function:
function getCaller()
{
var stack;
var ret = "";
try
{
throw new Error("Whoops!");
}
catch (e)
{
stack = e.stack;
}
finally
{
var matchArr = stack.match(/\(.*\)/g);
if (matchArr.length > 2)
{
tmp = matchArr[2];
ret = tmp.slice(1, tmp.length - 1) + "()";
}
return ret;
}
}
It throws an Error() and then gets the function name from the stack trace.
Try varying the '2' in matchArr[2] when using wrappers.
caller is a non-standard extension to JavaScript (that is, many browsers have it, but it's not part of the ECMAScript standard) and is not implemented in Apps Script.
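For illustration, this is what the non-standard property looks like in environments that do implement it (a sketch; note it is also forbidden in strict mode):

function callee() {
  // non-standard: works in many browsers, undefined in Apps Script,
  // and a TypeError in strict mode
  var caller = arguments.callee.caller;
  console.log(caller && caller.name); // would log "wrapper" where supported
}
function wrapper() { callee(); }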
I made a function to get the call stack based on jgrotius's answer:
function getCallStack() {
  var returnValue = [];
  var framePattern = /\sat (.+?):(\d+) \((.+?)\)/;
  try {
    throw new Error('');
  } catch (e) {
    returnValue = e.stack
      .split('\n')
      .filter(function(frame, index) {
        // skip empty lines and this function's own frame
        return frame.trim() !== '' && index > 0;
      })
      // at app/lib/debug:21 (getCaller)
      .map(function(frame) {
        var parts = frame.match(framePattern);
        return {
          file: parts[1],
          line: parseInt(parts[2]),
          func: parts[3]
        };
      });
  }
  return returnValue;
}
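A usage sketch under the stack format assumed above (myFunction is a hypothetical name):

function myFunction() {
  var stack = getCallStack();
  // stack[0] is myFunction's own frame; stack[1] is whoever called it
  Logger.log('called by: ' + (stack[1] ? stack[1].func : '(top level)'));
}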
This is my updated version of the other two proposed solutions:
const getStacktrace = () => {
  try {
    throw new Error('')
  } catch (exception) {
    // example: at getStacktrace (helper:6:11)
    const regex = /\sat (.+?) \((.+?):(\d+):(\d+)\)/
    return exception
      .stack
      .split('\n')
      .slice(1, -1)
      .filter((frame, index) => {
        return frame && index > 0
      })
      .map((frame) => {
        const parts = frame.match(regex)
        return {
          function: parts[1],
          file: parts[2],
          line: parseInt(parts[3]),
          column: parseInt(parts[4])
        }
      })
  }
}
P.S.: please note that the regex has changed, and also that we are ignoring the first element of the stack trace, since it is the getStacktrace function itself.