Imported generated JSON in JSX causes Webpack build loop - json

I've got a small postcss plugin I've made that generates a JSON file off a colors.css variable file during webpack build.
My postcss plugin
const fs = require('fs');
const postcss = require('postcss');
// Upper-case the first character of a word, e.g. 'dark' -> 'Dark'.
const capitalize = (word) => `${word.charAt(0).toUpperCase()}${word.slice(1)}`;
// Build a camelCase JSON key from a CSS custom property name,
// e.g. '--color-primary-dark' -> 'primaryDark'.
const getPropName = (cssProp) => {
  const segments = clean(cssProp.split('-'));
  segments.shift(); // drop the leading 'color' prefix segment
  // Keep the first remaining segment lowercase; capitalize the rest.
  return segments
    .map((segment, index) => (index === 0 ? segment : capitalize(segment.toString())))
    .join('');
};
// Remove falsy entries from `array` in place (e.g. the empty strings that
// splitting '--color-x' on '-' produces) and return the same array.
// Fix: the original incremented `i` after each splice, re-checking an element
// that had already been visited — iterating backward, elements shifted down by
// splice were processed earlier, so the extra check was redundant.
const clean = (array) => {
  for (let i = array.length - 1; i >= 0; i--) {
    if (!array[i]) {
      array.splice(i, 1);
    }
  }
  return array;
};
module.exports = postcss.plugin('cssobject', (files, filters, options) =>
(css) => {
options = options || {
destination: ''
};
// Processing code will be added here
const getVariable = (variable) => {
let result;
css.walkRules((rules) => {
rules.walkDecls((decl) => {
const pointer = variable.replace('var(', '').replace(')','');
if(!decl.prop.match(pointer)) return;
result = decl.value;
});
});
return result;
};
css.walkRules((rules) => { //hooks into CSS stream
let i = files.length;
let cssObject = {};
while (i--) {
if(!rules.source.input.from.match(files[i])) return; //scrubs against requested files
rules.walkDecls((decl) => {
let j = filters.length;
while(j--){
if(!decl.prop.match(filters[j])) return; //scrubs against requested rules
let prop = getPropName(decl.prop);
cssObject[prop] = (decl.value.match('var'))? getVariable(decl.value) : decl.value;
}
});
}
if (options.destination) {
fs.writeFile(options.destination, JSON.stringify(cssObject), 'utf8');
}
});
}
);
I'm then importing this JSON file into a react component JSX file to then parse JSON data into a visual guide of project's used colors under AA and AAA requirements... anywho
The problem I'm having is that my webpack-dev-server keeps re-building over and over again because it thinks a change has been made to the JSX file, when in fact the only change is to the JSON file being imported.
Is there a standard way of importing generated files in to a JSX without causing infinite build loops?
I've already tried having the JSON file saved well outside of webpack-dev-server's watch location, and still the build loop remains.
Thanks in advance!

You can back-date your file's timestamp; then webpack will not rebuild after your plugin rewrites the file:
// Back-date both mtime and atime by 11 seconds so the watcher does not
// treat the freshly generated JSON as a new change.
const nowSeconds = Date.now() / 1000;
const backdated = nowSeconds - 11;
fs.utimesSync(jsonPath, backdated, backdated);
Give it a try — I hope this helps.

Related

SyntaxError when trying to change files (JSON)

This code has suddenly stopped working in Visual Studio Code. It is supposed to change the JSON files' text. I got this code from https://www.youtube.com/watch?v=OWdyi5EGxq4; here's the code below. Help would be appreciated, thanks.
const fs = require("fs");

// Rewrite the "name" field of every JSON file in the folder given as the
// first CLI argument to "ipfwork/<id>", where <id> is the file name without
// its extension.
//
// Fixes vs. the original:
//  - Removed the unused `const { json } = require("stream/consumers")`;
//    that module only exists in Node >= 16.7, so requiring it crashes older
//    Node versions (the likely cause of the sudden breakage).
//  - `dir` already ends with '/', so `${dir}/${file}` produced a doubled
//    separator; now joined without the extra '/'.
const args = process.argv.slice(2);
const inputFolder = args[0];
const dir = `${__dirname}/${inputFolder}/`;
const inputFiles = fs.readdirSync(dir).sort();
inputFiles.forEach((file) => {
  const id = file.split(".").shift(); // file name without extension
  const data = JSON.parse(fs.readFileSync(`${dir}${file}`, "utf8"));
  data.name = `ipfwork/${id}`;
  fs.writeFileSync(`${dir}${file}`, JSON.stringify(data, null, 2));
  console.log(data);
});

CheerioJS to parse data on script tag

I've been trying to parse the data that is in the script tag using cheerio however It's been difficult for the following reasons.
Can't parse string that is generated into JSON because of html-entities
More Info:
Also what is strange to me is that you have to re-load the content into cheerio a second time to get the text.
You're welcome to fork this replit or copy and paste the code to try it yourself:
https://replit.com/#Graciasc/Cheerio-Script-Parse
// Question code: tries to extract the argument of JSON.parse() from an
// inline <script> tag with cheerio, then parse it. The final parses fail.
const cheerio = require('cheerio')
const {decode} = require('html-entities')
// NOTE(review): in this snippet the \xNN sequences are JavaScript string
// escapes, so `html` already contains the decoded characters ({, ", etc.)
// before cheerio ever sees it — in the real page they presumably arrive as
// literal text; confirm against the actual server response.
const html = `
<body>
<script type="text/javascript"src="/data/common.0e95a19724a68c79df7b.js"></script>
<script>require("dynamic-module-registry").set("from-server-context", JSON.parse("\x7B\x22data\x22\x3A\x7B\x22available\x22\x3Atrue,\x22name\x22\x3A"Gracias"\x7D\x7D"));</script>
</body>
`;
const $ = cheerio.load(html, {
decodeEntities: false,
});
// Select only the inline script (exclude the external text/javascript one).
const text = $('body').find('script:not([type="text/javascript"])');
const cheerioText = text.eq(0).html();
//implement a better way to grab the string
const scriptInfo = cheerio.load(text.eq(0).html()).text();
// Capture everything after "JSON.parse(" up to (but excluding) "));".
const regex = new RegExp(/^.*?JSON.parse\(((?:(?!\)\);).)*)/);
const testing = regex.exec(scriptInfo)[1];
// real output:
//\x7B\x22data\x22\x3A\x7B\x22available\x22\x3Atrue,\x22name\x22\x3A"Gracias"\x7D\x7D when logged
console.log(testing)
// Not Working
const json = JSON.parse(testing)
const decoding = decode(testing)
// same output as testing
console.log(decoding)
// Not working
console.log('decode', JSON.parse(decoding))
//JSON
{ Data: { available: true, name: 'Gracias' } }
A clean solution is to use JSDOM
repl.it link( https://replit.com/#Graciasc/Cheerio-Script-Parse#index.js)
const { JSDOM } = require('jsdom')

// Serialize the document through JSDOM, then pull the string argument of
// JSON.parse() out of the inline script with a regex and parse it.
const dom = new JSDOM(`<body>
<script type="text/javascript"src="/data/common.0e95a19724a68c79df7b.js"></script>
<script>require("dynamic-module-registry").set("from-server-context", JSON.parse("\x7B\x22data\x22\x3A\x7B\x22available\x22\x3Atrue,\x22name\x22\x3A"Gracias"\x7D\x7D"));</script>
</body>`)
const markup = dom.serialize()
// Capture everything between JSON.parse(" and the closing "));
const extractor = new RegExp(/^.*?JSON.parse\("((?:(?!"\)\);).)*)/gm);
const payload = extractor.exec(markup)[1];
console.log(JSON.parse(payload))
// output: { data: { available: true, name: 'Gracias' } }

Gulp control output based on data from an earlier pipe

I have been trying to find a way with gulp to only write out certain files based on yaml data I am collecting in the pipe. I have been able to see the file's data, but not able to get the output I expect.
In this task, I am collecting a glob of markdown files, and passing them into a pipe that reads the yaml using gulp-data, and then adds some other data to it. I then pipe it through Swig.
I'm trying to add some sort of conditional element before I pipe into gulp.dest. I found this example which got me to where I am currently.
The closest I've gotten is below:
// Fragment of a gulp pipeline: for files dated within `buildRange`, start a
// nested stream that copies the file to the /underreview/ destination.
// NOTE(review): tap sees the right files (console.log fires), but the nested
// gulp.src().pipe(gulp.dest()) stream is never returned or awaited, so the
// outer pipeline can finish before these writes complete — presumably why
// nothing gets written; confirm.
.pipe(tap(function(file) {
if (new Date(file.data.date) >= new Date(buildRange)) {
console.log(file.path);
gulp.src(file.path)
.pipe(gulp.dest(config.paths.dest + '/underreview/'));
}
}))
What I've gotten from the console.log command is correct (it shows 2 of 50 files). But nothing gets written to the destination. If I move the gulp.dest outside of this pipe, all files get written.
I've tried using gulp-if or gulp-ignore, but have been unable to get the file.data.date into either of those modules.
Edited: Here's the complete task
// Gulp task module: renders markdown content through Swig templates and
// writes posts dated within the last 14 days to an "underreview" folder.
module.exports = function(gulp, config, env) {
var gulpSwig = require('gulp-swig'),
swig = require('swig'),
data = require('gulp-data'),
matter = require('gray-matter'),
runSequence = require('run-sequence'),
// BrowserSync
reload = config.browserSync.reload,
_ = require('lodash'),
Path = require('path'),
requireDir = require('require-dir'),
marked = require('marked'),
readingTime = require('reading-time'),
postsData = [],
postsTags = [],
pdate = null,
// Cutoff: 14 days before now; posts on/after this date are "under review".
buildRange = new Date(new Date().setDate(new Date().getDate()-14));
// NOTE(review): the ';' above terminates the `var` statement, so everything
// from here to the next ';' runs as one comma expression assigning to
// implicit globals (sitebuilddate, through, gutil, rename, File, $if,
// ignore, tap). It was almost certainly meant to be a ','.
sitebuilddate = null,
through = require('through2'),
gutil = require('gulp-util'),
rename = require('gulp-rename'),
File = require('vinyl'),
$if = require('gulp-if'),
ignore = require('gulp-ignore'),
tap = require('gulp-tap');
// Swig options: disable template caching and load templates from the
// project's templates directory.
var opts = {
defaults: {
cache: false
},
setup: function(Swig) {
Swig.setDefaults({
loader: Swig.loaders.fs(config.paths.source + '/templates')});
}
};
// Full of the compiled HTML file
// NOTE(review): targetPath() is called here and below but is not defined in
// this snippet — presumably defined elsewhere in the project; confirm.
function targetPathFull(path, data) {
return Path.join(Path.dirname(path), targetPath(data));
}
// Render each markdown file, then attempt to copy recently dated output
// into the /underreview/ destination.
gulp.task('templates2:under', function() {
return gulp.src(config.paths.source + '/content/**/*.md')
.pipe(data(function(file) {
postData = [];
// NOTE(review): `postData` is assigned above without `var` (implicit
// global) and then shadowed by the declaration in the var list below.
var matterObject = matter(String(file.contents)), // extract front matter data
type = matterObject.data.type, // page type
body = matterObject.content,
postData = matterObject.data,
moreData = requireDir(config.paths.data),
data = {},
bodySwig;
bodySwig = swig.compile(body, opts);
// Use swig to render partials first
body = bodySwig(data);
// Process markdown
if (Path.extname(file.path) === '.md') {
body = marked(body);
}
// Inherit the correct template based on type
if (type) {
var compiled = _.template(
"{% extends 'pages/${type}.html' %}{% block body %}${body}{% endblock %}"
// Always use longform until a different template for different types is needed
//"{% extends 'pages/longform.html' %}{% block body %}${body}{% endblock %}"
);
body = compiled({
"type": type,
"body": body
});
}
file.path = targetPathFull(file.path, postData);
moreData.path = targetPath(postData);
_.merge(data, postData, moreData);
data.url = data.site.domain + "/" + data.slug;
// Copy the processed body text back into the file object so Gulp can keep piping
// NOTE(review): new Buffer(body) is deprecated; Buffer.from(body) is the
// modern equivalent.
file.contents = new Buffer(body);
return data;
}))
.pipe(gulpSwig(opts))
// NOTE(review): the nested gulp.src().pipe(gulp.dest()) stream below is
// never returned or awaited by the outer pipeline, so the task may end
// before those writes happen — likely why no files appeared.
.pipe(tap(function(file) {
if (new Date(file.data.date) >= new Date(buildRange)) {
console.log(file.path);
gulp.src(file.path)
.pipe(gulp.dest(config.paths.dest + '/underreview/'));
}
}))
.pipe(gulp.dest(config.paths.dest + '/underreview/'));
});
}
So, possibly not the best solution, but after some re-factoring I came up with this:
// Fragment of the refactored pipeline: write ALL files first, then delete
// the ones that fall outside the build range (note the comparison is
// inverted relative to the earlier attempt: `<` selects stale files).
.pipe(gulp.dest(config.paths.dest + '/underreview/'))
.pipe(tap(function(file) {
if (new Date(file.data.date) < new Date(buildRange)) {
console.log(file.data.path);
// NOTE(review): del() returns a promise that is not awaited here;
// deletion is fire-and-forget.
del(config.paths.dest + '/underreview/' + file.data.path)
}
}))
I moved the gulp-tap to after the output, and then I am deleting the file that was just written.

Convert .txt file to JSON

I want to convert a fairly unorganized and unstructured text file to JSON format. I want to be able to use the city ID information. Is there any way I can convert this to JSON?
UPDATE: I also found this solution after a while. Very simple way to get the JSON of any tab separated text file.
https://shancarter.github.io/mr-data-converter/
You can try tsv2json; this tool reads a TSV file from stdin and writes a JSON file to stdout.
It's distributed as a source file; to compile it, download the D compiler and then run dmd tsv2json.d.
If you have more complex task there is another tool named tsv-utils
TSV to JSON in nodejs
// Convert a tab-separated file to JSON: the first line supplies the column
// names, and every following line becomes one object keyed by those columns.
// Output is written to '<file_name>.json' with the rows under a single key
// named after the input file.
var file_name = 'city_list.txt';
var readline = require('readline');
var fs = require('fs');

var lineReader = readline.createInterface({
input: fs.createReadStream(file_name)
});

var isHeader = false; // flips to true once the header row has been consumed
var columnNames = [];

// Split a line on tabs, trimming surrounding whitespace first.
var parseLine = function (line) {
return line.trim().split('\t');
};

// Zip the header columns with one row's values into an object.
var createRowObject = function (values) {
return columnNames.reduce(function (row, column, index) {
row[column] = values[index];
return row;
}, {});
};

var json = {};
json[file_name] = [];

lineReader.on('line', function (line) {
if (isHeader) {
json[file_name].push(createRowObject(parseLine(line)));
} else {
columnNames = parseLine(line);
isHeader = true;
}
});

lineReader.on('close', function () {
fs.writeFileSync(file_name + '.json', JSON.stringify(json, null, 2));
});

Is file path valid in gulp src

I'm looking for solution which will help me to find is file path valid. And if file path not valid to show some error.
// Question code: gulp task meant to verify that each library path exists
// before concatenating everything into bundle.libraries.js.
gulp.task("scripts-libraries", ["googlecharts"], function () {
var scrlibpaths = [
"./node_modules/jquery/dist/jquery.min.js",
"./node_modules/bootstrap/dist/js/bootstrap.min.js",
"./libs/AdminLTE-2.3.0/plugins/slimScroll/jquery.slimscroll.min.js",
"./libs/AdminLTE-2.3.0/plugins/fastclick/fastclick.min.js",
"./libs/adminLTE-app.js",
"./node_modules/moment/min/moment.min.js",
"./node_modules/jquery.inputmask/dist/jquery.inputmask.bundle.js",
"./node_modules/bootstrap-timepicker/js/bootstrap-timepicker.min.js",
"./node_modules/bootstrap-checkbox/dist/js/bootstrap-checkbox.min.js",
"./node_modules/bootstrap-daterangepicker/daterangepicker.js",
"./node_modules/select2/dist/js/select2.full.min.js",
"./node_modules/toastr/build/toastr.min.js",
"./node_modules/knockout/build/output/knockout-latest.js",
"./node_modules/selectize/dist/js/standalone/selectize.min.js",
//"./src/jquery.multiselect.js"
];
// NOTE(review): scrlibpaths[i] is a plain string — strings have no .pipe()
// method, so this throws a TypeError on the first iteration and never
// performs the intended existence check. An fs.accessSync or glob-based
// check is needed instead. (Also note the missing space after "no" in the
// message string.)
for (var i = 0; i < scrlibpaths.length; i++) {
if (scrlibpaths[i].pipe(size()) === 0) {
console.log("There is no" + scrlibpaths[i] + " file on your machine");
return;
}
}
return gulp.src(scrlibpaths)
.pipe(plumber())
.pipe(concat("bundle.libraries.js"))
.pipe(gulp.dest(config.path.dist + "/js"));
});
So how can I make this work?
You can use the glob module to check if the paths/globs you pass to gulp.src() refer to existing files. Gulp itself uses glob internally via glob-stream so this should be the most reliable option.
Here's a function that uses glob and that you can use as a more or less drop-in replacement for the regular gulp.src():
var glob = require('glob');

// Drop-in replacement for gulp.src() that logs a warning for every path or
// glob that matches no files. If any entry is missing, an empty stream is
// returned (all-or-nothing), so no files are processed at all.
function gulpSrc(paths) {
  paths = (paths instanceof Array) ? paths : [paths];
  var existingPaths = [];
  paths.forEach(function(path) {
    if (glob.sync(path).length === 0) {
      console.log(path + ' doesnt exist');
    } else {
      existingPaths.push(path);
    }
  });
  return gulp.src((paths.length === existingPaths.length) ? paths : []);
}
You can then use it like this:
// Usage fragment: identical to the original gulp.src() pipeline, but missing
// paths are logged (and yield an empty stream) before bundling.
return gulpSrc(scrlibpaths)
.pipe(plumber())
.pipe(concat("bundle.libraries.js"))
.pipe(gulp.dest(config.path.dist + "/js"));
Since gulp is just like any other node script you can use accessSync to check whether the file exists (I assume you probably want to be synchronous).
var fs = require('fs');

// Synchronously report every path that does not exist or is not accessible.
// Fix: the original used .map() purely for its side effect and discarded the
// mapped array; forEach expresses the intent correctly.
scrlibpaths.forEach(function(path) {
  try {
    fs.accessSync(path);
  } catch (e) {
    console.log("There is no " + path + " file on your machine");
  }
});
});