Gulp or grunt plugin to increment a value?

In my package.json file, I have a key/value pair that represents a release candidate value.
{
  "name": "my-product",
  "version": "1.0.0",
  "rc": "1",
  ...
}
Is there a Gulp plugin that will increment the rc value for me that I can use in a task? If not how can I increment it?
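For the second part of the question (incrementing without a plugin), a plain gulp task can read package.json, bump the field, and write it back. A minimal sketch, assuming rc stays a numeric string as in the example above:
const fs = require('fs');

function bumpRc(cb) {
  const pkg = JSON.parse(fs.readFileSync('./package.json', 'utf8'));
  // rc is stored as a string ("1"), so convert before incrementing
  pkg.rc = String(Number(pkg.rc) + 1);
  fs.writeFileSync('./package.json', JSON.stringify(pkg, null, 2) + '\n');
  cb();
}

exports.bumpRc = bumpRc;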

The gulp-json-modify plugin works nicely for my requirements. The steps are:
Install the plugin:
$ npm install --save-dev gulp-json-modify
Then in gulpfile.js
const { src, dest } = require('gulp');
const json = require('gulp-json-modify');
const fs = require('fs');
const path = require('path');

const package_json = path.join(path.dirname(__filename), 'package.json');
const pkg = JSON.parse(fs.readFileSync(package_json));
const name = pkg.name;
const patch = pkg.patch;
const rc = pkg.rc;

function bumpValue(key, value) {
  // the values are stored as strings in package.json, so convert before incrementing
  value = Number(value) + 1;
  return src(['./package.json'])
    .pipe(json({
      key: key,
      value: String(value)
    }))
    .pipe(dest('./'));
}

function bumpPatch(cb) {
  bumpValue('patch', patch);
  cb();
}
exports.bumpPatch = bumpPatch;

function bumpRc(cb) {
  bumpValue('rc', rc);
  cb();
}
exports.bumpRc = bumpRc;
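With the tasks exported this way, they can be run from the command line as gulp bumpPatch or gulp bumpRc (or npx gulp bumpRc if gulp is not installed globally).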

Related

Building a nix derivation within a module

I created the following module, services/invidious.nix:
{ pkgs, stdenv, ... }:
stdenv.mkDerivation {
  name = "invidious";
  container = pkgs.dockerTools.buildLayeredImage {
    name = "invidious";
    contents = [ pkgs.busybox pkgs.bash pkgs.invidious ];
    config = {
      Cmd = [ "/bin/bash" ];
      Env = [];
      Volumes = {};
    };
  };
}
My eventual goal is to have several services in modules and use nix-build to build each of those services as containers, and write the resulting image names to a file:
let
  config = import ./config.nix;
  pkgs = config.pkgs;
  invidious = import ./services/invidious.nix;
in rec {
  serviceimages = pkgs.writeText "images.txt" ''
    ${invidious(pkgs)}
  '';
}
and my config.nix just has the pinned pkgs version:
{
  # nixos-22.05 / https://status.nixos.org/
  pkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/d86a4619b7e80bddb6c01bc01a954f368c56d1df.tar.gz") {};
}
However, when I use nix-build, I get the following error:
nix-build services.nix -A serviceimages
these 2 derivations will be built:
/nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv
/nix/store/4x31hx9nxcbbksi2hsim08djrsj4h1zh-images.txt.drv
building '/nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv'...
unpacking sources
variable $src or $srcs should point to the source
error: builder for '/nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv' failed with exit code 1;
last 2 log lines:
> unpacking sources
> variable $src or $srcs should point to the source
For full logs, run 'nix log /nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv'.
If I try to pull the full logs using the command given, I get the following:
nix log /nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv
error: experimental Nix feature 'nix-command' is disabled; use '--extra-experimental-features nix-command' to override
...and if I enable the experimental feature, I see the following:
nix --extra-experimental-features nix-command log /nix/store/dbl3bzc05pssq3q9g8wd2i92xpmwf5bb-invidious.drv
#nix { "action": "setPhase", "phase": "unpackPhase" }
unpacking sources
variable $src or $srcs should point to the source
If I just try to build the same service in a single file, it successfully builds the image:
let
  # nixos-22.05 / https://status.nixos.org/
  pkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/d86a4619b7e80bddb6c01bc01a954f368c56d1df.tar.gz") {};
in rec {
  docker = pkgs.dockerTools.buildLayeredImage {
    name = "invidious";
    contents = [ pkgs.busybox pkgs.bash pkgs.invidious ];
    config = {
      Cmd = [ "/bin/sh" ];
      Env = [];
      Volumes = {};
    };
  };
  results = pkgs.writeText "images.txt" ''
    ${docker}
  '';
}
What am I doing wrong with my attempt to use modules?
I figured it out: I didn't need mkDerivation at all, just buildLayeredImage. stdenv.mkDerivation runs the generic builder, whose unpack phase expects $src or $srcs to point at some source, which is where the error came from. The module now returns the image derivation directly:
{ pkgs, ... }:
pkgs.dockerTools.buildLayeredImage {
  name = "invidious";
  contents = [ pkgs.busybox pkgs.bash pkgs.invidious ];
  config = {
    Cmd = [ "/bin/bash" ];
    Env = [];
    Volumes = {};
  };
}
The services.nix and config.nix stay the same:
# services.nix
let
  config = import ./config.nix;
  pkgs = config.pkgs;
  invidious = import ./services/invidious.nix;
in rec {
  serviceimages = pkgs.writeText "images.txt" ''
    ${invidious(pkgs)}
  '';
}

# config.nix
{
  # nixos-22.05 / https://status.nixos.org/
  pkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/d86a4619b7e80bddb6c01bc01a954f368c56d1df.tar.gz") {};
}

Change name of provideCompletionItems command in VsCode Extension

I am creating a VsCode extension which provides inline completions. Currently, it can only be triggered with the default "Trigger Suggest" command. I want my extension to have its own command, distinct from the default one. The main code looks like this:
import * as vscode from 'vscode';

export function activate(context: vscode.ExtensionContext) {
  const command = 'getairesponse';
  // VS Code language identifiers are lowercase
  const files = ['c', 'cpp', 'csharp', 'java', 'javascript', 'php', 'python', 'sql', 'html', 'plaintext'];
  const provider1 = vscode.languages.registerCompletionItemProvider(files, {
    async provideCompletionItems(document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken, context: vscode.CompletionContext) {
      var editor = vscode.window.activeTextEditor;
      if (editor) {
        var cursorPosition = editor.selection.active;
        var input = editor.document.getText(new vscode.Range(0, 0, cursorPosition.line, cursorPosition.character));
        var out = getOutput(input);
        const simpleCompletion = new vscode.CompletionItem(await out);
        return [simpleCompletion];
      }
    }
  });
  context.subscriptions.push(provider1);
}
I want the command that triggers the extension to be the constant command defined above. I have also declared the command in the package.json file:
"contributes": {
"commands": [
{
"command": "getairesponse",
"title": "Get AI Response"
}
]
}
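A contributed command also needs a handler registered at runtime. A minimal sketch of wiring this up inside activate (where command and context are in scope), assuming the intent is simply to open the suggestion widget; editor.action.triggerSuggest is the built-in command behind "Trigger Suggest":
const disposable = vscode.commands.registerCommand(command, () => {
  // Forward the contributed command to the built-in suggest widget
  vscode.commands.executeCommand('editor.action.triggerSuggest');
});
context.subscriptions.push(disposable);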

webpack: is there a good hook / custom function to dump out the resolved configuration?

I'm somewhat of a newbie with webpack and have been experimenting with easier ways to adjust/merge webpack configurations.
The following code, added to webpack/lib/webpack.js, has been pretty helpful.
This is the standard webpack.js:
function webpack(options, callback) {
  var compiler;
  if(Array.isArray(options)) {
    compiler = new MultiCompiler(options.map(function(options) {
      return webpack(options);
    }));
  } else if(typeof options === "object") {
    new WebpackOptionsDefaulter().process(options);
    compiler = new Compiler();
    compiler.options = options;
    compiler.options = new WebpackOptionsApply().process(options, compiler);
    new NodeEnvironmentPlugin().apply(compiler);
    compiler.applyPlugins("environment");
    compiler.applyPlugins("after-environment");
  } else {
    throw new Error("Invalid argument: options");
  }
  if(callback) {
    if(typeof callback !== "function") throw new Error("Invalid argument: callback");
    if(options.watch === true || (Array.isArray(options) &&
        options.some(function(o) {
          return o.watch;
        }))) {
      var watchOptions = (!Array.isArray(options) ? options : options[0]).watchOptions || {};
      // TODO remove this in next major version
      var watchDelay = (!Array.isArray(options) ? options : options[0]).watchDelay;
      if(watchDelay) {
        console.warn("options.watchDelay is deprecated: Use 'options.watchOptions.aggregateTimeout' instead");
        watchOptions.aggregateTimeout = watchDelay;
      }
      return compiler.watch(watchOptions, callback);
    }
    compiler.run(callback);
  }
and this is my code, inserted just before the final return:
  //customization start
  var fs = require('fs');
  var fnp_dump = 'webpack.dump.json';
  fs.writeFile(fnp_dump, JSON.stringify(options, null, 2), function(err) {
    if(err) {
      return console.log(err);
    }
    console.log("dumping dump.webpack.js.final.json from webpack.js to: " + fnp_dump);
  });
  //customization end
  return compiler;
}
The basic idea is that it dumps out the final options object after webpack has finished merging the usual webpack.base.js + webpack.development.js. Since it is, at that point, just a fully resolved JavaScript object, it doesn't really matter how the config files were written by individual developers.
Now you can diff the options sent to webpack (this is an example of transforming a webpack 1 configuration to webpack 2):
diff 003/webpack.dump.json 004/webpack.dump.json
< "loader": "style!css!postcss-loader!sass"
---
> "use": [
> {
> "loader": "style-loader"
> },
> {
> "loader": "postcss-loader"
> },
> {
> "loader": "sass-loader"
> }
> ]
However, I am customizing webpack.js directly and need to re-apply my patch after each npm update webpack. Is there a better way?
If your webpack.config.js is a function, you can call it yourself to resolve it to an object.
If you have several configs (you mentioned webpack.base.js and webpack.development.js), you can use webpack-merge to combine your options into a single object and then write it to the file system.
I would recommend adding a dedicated script to package.json for this job, which you can then run after your webpack job:
...,
"scripts": {
  "dump-options": "scriptThatMergesConfigsAndWritesToFS.js",
  "webpack-job": "webpack ... && npm run dump-options",
  ...
},
...
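The merging script itself could look something like the following sketch. It assumes an older webpack-merge where require('webpack-merge') returns the merge function directly (newer versions export { merge }), and it reuses the webpack.base.js / webpack.development.js names from the question:
// scriptThatMergesConfigsAndWritesToFS.js (name taken from the scripts example above)
var fs = require('fs');
var merge = require('webpack-merge');

var base = require('./webpack.base.js');
var dev = require('./webpack.development.js');

// Combine the configs into one fully resolved options object and dump it
var options = merge(base, dev);
fs.writeFileSync('webpack.dump.json', JSON.stringify(options, null, 2));
console.log('wrote resolved options to webpack.dump.json');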
UPDATE
After some more research I realized that the resolved options object is stored on the compiler object. The compiler object is passed to plugins, so you can easily write a plugin that writes the config to a file, as I did here (not tested).
I also realized that the plugins cannot be stringified, as they are functions, so be aware that you will lose the plugin configuration information.
I ended up writing my own plugin (and now notice that wtho wrote one too). It worked for me - note you need to have the bit of code that handles circular references:
// WebPackCompilationObserver.js
function WebPackCompilationObserver(options) {
  WebPackCompilationObserver.options = options || {};
}

WebPackCompilationObserver.prototype.apply = function(compiler) {
  compiler.plugin("emit", function(compilation, callback) {
    var fs = require('fs');
    var fnp_dump = WebPackCompilationObserver.options.dump_filename;
    if (!fnp_dump) {
      fnp_dump = "./dump.webpack.options.json";
      console.log("please specify dump_filename path in the WebPackCompilationObserver.options, otherwise using default: " + fnp_dump);
    }
    if (fnp_dump) {
      console.log("dumping compilation.options to: " + fnp_dump);
      var cache = [];
      fs.writeFile(fnp_dump, JSON.stringify(compilation.options, function(key, value) {
        if (typeof value === 'object' && value !== null) {
          if (cache.indexOf(value) !== -1) {
            // Circular reference found, discard key
            return;
          }
          // Store value in our collection
          cache.push(value);
        }
        return value;
      }, 2),
      function(err) {
        if (err) {
          return console.log(err);
        }
      });
      cache = null;
    }
    callback();
  });
};

module.exports = WebPackCompilationObserver;
To use it:
webpack.config.development.js:
....
var WebPackCompilationObserver = require("./WebPackCompilationObserver");
....
config.plugins = config.plugins.concat([
....
,new WebPackCompilationObserver({dump_filename: '../dumpithere.json'})
])
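One caveat for newer setups: compiler.plugin(...) belongs to the old plugin API and was removed in webpack 5. Since webpack 4 the same hook is registered through compiler.hooks; a rough, untested sketch of the equivalent registration:
// Webpack 4+ style registration for the same idea
function WebPackCompilationObserver(options) {
  this.options = options || {};
}

WebPackCompilationObserver.prototype.apply = function(compiler) {
  var self = this;
  compiler.hooks.emit.tapAsync('WebPackCompilationObserver', function(compilation, callback) {
    var fs = require('fs');
    var fnp_dump = self.options.dump_filename || './dump.webpack.options.json';
    var cache = [];
    // compiler.options holds the fully resolved configuration
    fs.writeFileSync(fnp_dump, JSON.stringify(compiler.options, function(key, value) {
      if (typeof value === 'object' && value !== null) {
        if (cache.indexOf(value) !== -1) return; // drop circular references
        cache.push(value);
      }
      return value;
    }, 2));
    callback();
  });
};

module.exports = WebPackCompilationObserver;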

npm start failure. $JSON.stringify.apply is not a function

I'm trying to run a react app but running into issues.
This is the error trace.
ERROR in Template execution failed: TypeError: $JSON.stringify.apply is not a function
ERROR in TypeError: $JSON.stringify.apply is not a function
- json-output-template.js:96 stringify
/CoD/src/origin-cms/origin-ui/ui.reactapps/src/json-output-template.js:96:26
- json-output-template.js:126 module.exports
/CoD/src/origin-cms/origin-ui/ui.reactapps/src/json-output-template.js:126:36
- index.js:265
[ui.reactapps]/[html-webpack-plugin]/index.js:265:16
- util.js:16 tryCatcher
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/util.js:16:23
- promise.js:512 Promise._settlePromiseFromHandler
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/promise.js:512:31
- promise.js:569 Promise._settlePromise
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/promise.js:569:18
- promise.js:606 Promise._settlePromiseCtx
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/promise.js:606:10
- async.js:138 Async._drainQueue
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/async.js:138:12
- async.js:143 Async._drainQueues
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/async.js:143:10
- async.js:17 Immediate.Async.drainQueues
[ui.reactapps]/[html-webpack-plugin]/[bluebird]/js/release/async.js:17:14
The template it's trying to execute is:
module.exports = function (templateParams) {
  var obj = {};
  obj.js = (templateParams.htmlWebpackPlugin.options.globalJs || []).concat(templateParams.htmlWebpackPlugin.files.js);
  obj.css = templateParams.htmlWebpackPlugin.files.css;
  return JSON.stringify(obj);
};
The application versions I'm using:
AMAC02PQ06PG8WN:ui.reactapps subhash.sulanthar$ node --version
v7.8.0
AMAC02PQ06PG8WN:ui.reactapps subhash.sulanthar$ npm --version
4.2.0
AMAC02PQ06PG8WN:ui.reactapps subhash.sulanthar$ yarn --version
0.21.3
AMAC02PQ06PG8WN:ui.reactapps subhash.sulanthar$ nvm --version
0.33.1
I've tried reinstalling node and yarn, clearing application caches, etc., all to no avail.
It all looks fine when I log the parameters from the template JS:
module.exports = function (templateParams) {
  var obj = {};
  obj.js = (templateParams.htmlWebpackPlugin.options.globalJs || []).concat(templateParams.htmlWebpackPlugin.files.js);
  obj.css = templateParams.htmlWebpackPlugin.files.css;
  console.log("Object: ", obj);
  console.log("JSON: ", JSON);
  console.log("JSON.stringify: ", JSON.stringify);
  return JSON.stringify(obj);
};
Console:
Object: { js:
[ '/etc/react-apps-chunks/b43f1ef1ef32e943333a.login-modal.js',
'/etc/react-apps-chunks/bb104a33ab4b80479b28.compare-plans.js' ],
css: [ '/etc/react-apps-chunks/ef77cfb892b9199adc95.login-modal.styles.css' ] }
JSON: {}
JSON.stringify: function stringify(it) {
// eslint-disable-line no-unused-vars
return $JSON.stringify.apply($JSON, arguments);
}
Object: { js:
[ '/etc/react-apps-chunks/react.js',
'/etc/react-apps-chunks/react-dom.js',
'/etc/react-apps-chunks/8c3d64978772b4558f5b.originReactAemIntegrator.js' ],
css: [] }
JSON: {}
JSON.stringify: function stringify(it) {
// eslint-disable-line no-unused-vars
return $JSON.stringify.apply($JSON, arguments);
}
Any idea if I'm missing something here?

Create a dynamic array for use in grunt concat

I need to concatenate a set of files based on variables I have defined in my package.json.
// package.json
...
"layouts": [
  {
    "page": "home",
    "version": "a"
  },
  {
    "page": "about",
    "version": "a"
  },
  {
    "page": "contact",
    "version": "b"
  }
]
...
In Grunt I am then building these into an array and pumping it into the src parameter of my grunt-contrib-concat task.
// gruntfile.js
...
var package = grunt.file.readJSON('package.json'),
    targets = package.layouts,
    paths = [];

for (var target = 0; target < targets.length; target++) {
  paths.push("layouts/" + targets[target]['page'] + "/" + targets[target]['version'] + "/*.php");
}

var paths = JSON.stringify(paths);
grunt.log.write(paths); // Writing this to console for debugging

grunt.initConfig({
  concat: {
    build: {
      src: paths,
      dest: 'mysite/Code.php',
      options: {
        separator: '?>\n\n'
      }
    }
  }
});
...
My issue is that the paths variable does not work inside initConfig when it is assigned from JSON.stringify(paths).
If I manually input the array like the following, copied from the logged paths output, it works!
var paths = ["layouts/home/a/*.php","layouts/about/a/*.php","layouts/contact/b/*.php"];
What am I missing?
Derp. I fixed it: I didn't need to JSON.stringify() the array at all, since src expects an actual array of glob patterns rather than a JSON string.
The final working gruntfile is below:
// gruntfile.js
...
var package = grunt.file.readJSON('package.json'),
    targets = package.layouts,
    paths = [];

for (var target = 0; target < targets.length; target++) {
  paths.push("layouts/" + targets[target]['page'] + "/" + targets[target]['version'] + "/*.php");
}

grunt.initConfig({
  concat: {
    build: {
      src: paths,
      dest: 'mysite/Code.php',
      options: {
        separator: '?>\n\n'
      }
    }
  }
});
...
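As a side note, the loop that builds paths can be written more compactly with Array.prototype.map, reusing the package variable from the gruntfile above; a small sketch with the same behavior:
// Build one glob pattern per layout entry
var paths = package.layouts.map(function (layout) {
  return 'layouts/' + layout.page + '/' + layout.version + '/*.php';
});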