How to respond with plain text in a FeathersJS websocket API? - feathersjs

I define a Feathers service API as below:
class Monitor {
  find(_) {
    const metrics = prom.register.metrics();
    log.info(metrics);
    return new Promise((resolve) => {
      resolve({text: metrics});
    });
  }
}

function restFormatter(req, res) {
  res.format({
    'text/plain': function() {
      log('xxxx:', res);
      res.end(`The Message is: "${res.data}"`);
    }
  });
}
module.exports = function () {
  const app = this;
  // Initialize our service with any options it requires
  const service = new Monitor();
  app.configure(rest(restFormatter)).use('/metrics', service);
  // Get our initialized service so that we can bind hooks
  const monitorService = app.service('/metrics');
  // Set up our before hooks
  monitorService.before(hooks.before);
  // Set up our after hooks
  monitorService.after(hooks.after);
  return service;
};
module.exports.Monitor = Monitor;
When I call this API from the browser, I get the response below:
"# HELP nodejs_gc_runs_total Count of total garbage collections.\n# TYPE nodejs_gc_runs_total counter\n\n# HELP nodejs_gc_pause_seconds_total Time spent in GC Pause in seconds.\n# TYPE nodejs_gc_pause_seconds_total counter\n\n# HELP nodejs_gc_reclaimed_bytes_total Total number of bytes reclaimed by GC.\n# TYPE nodejs_gc_reclaimed_bytes_total counter\n"
From the above output you can see that FeathersJS doesn't return the data in plain text format; it serializes my response into a JSON string. Below is the output from a plain Express service shown in the browser:
# HELP nodejs_gc_runs_total Count of total garbage collections.
# TYPE nodejs_gc_runs_total counter
# HELP nodejs_gc_pause_seconds_total Time spent in GC Pause in seconds.
# TYPE nodejs_gc_pause_seconds_total counter
# HELP nodejs_gc_reclaimed_bytes_total Total number of bytes reclaimed by GC.
# TYPE nodejs_gc_reclaimed_bytes_total counter
# HELP newConnection The number of requests served
# TYPE newConnection counter
This output is what I really want. How can I make the FeathersJS service return the output above?
Below is my FeathersJS configuration:
app
  .use(compress())
  .options('*', cors())
  .use(cors())
  .use('/', serveStatic(app.get('public')))
  .use(bodyParser.json())
  .use(bodyParser.urlencoded({extended: true}))
  .configure(hooks())
  .configure(rest())
  .configure(
    swagger({
      docsPath: '/docs',
      uiIndex: path.join(__dirname, '../public/docs.html'),
      info: {
        title: process.env.npm_package_fullName,
        description: process.env.npm_package_description
      }
    })
  )
  .configure(
    primus(
      {
        transformer: 'websockets',
        timeout: false
      },
      (primus) => {
        primus.library();
        primus.save(path.join(__dirname, '../public/dist/primus.js'));
      }
    )
  )
  .configure(services)
  .configure(middleware);

You are configuring feathers-rest twice, which is why you still get the old output. Remove app.configure(rest(restFormatter)) from your service file, then either change .configure(rest()) to .configure(rest(restFormatter)) in the main file so the formatter applies to all services, or register a custom middleware for the service that does the formatting just for that service:
app.use('/metrics', service, function(req, res) {
  res.format({
    'text/plain': function() {
      log('xxxx:', res);
      res.end(`The Message is: "${res.data}"`);
    }
  });
});
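One more detail worth noting: the service resolves with {text: metrics}, so res.data in the formatter is an object, and interpolating it into a template string prints [object Object] rather than the metrics. A minimal sketch of a formatter that unwraps the property instead (metricsFormatter is a hypothetical name; this assumes the service keeps that result shape):

function metricsFormatter(req, res) {
  res.format({
    'text/plain': function() {
      // res.data holds the object the service resolved with,
      // so send its text property rather than the stringified object
      res.end(res.data.text);
    }
  });
}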

Related

React + Hardhat: how do I call smart contract methods from the frontend?

I'm using Hardhat locally and have a React frontend up and running, but I can't call the contract methods without errors. I've tried both ethers.js and web3.
Here's my code and my attempts; please let me know if you see what I'm doing wrong.
I'm trying to interact with contracts that are deployed in the local Hardhat environment through web3, and I'm unable to get back the data from the contract. Here's what I have:
var list = await contract.methods.getList();
console.log("list ", list );
which gets me
list {arguments: Array(0), call: ƒ, send: ƒ, encodeABI: ƒ, estimateGas: ƒ, …}
When I do
var list = await contract.methods.getList().call();
console.log("list ", list );
I get this error in the browser:
Returned values aren't valid, did it run Out of Gas? You might also see this error if you are not using the correct ABI for the contract you are retrieving data from, requesting data from a block number that does not exist, or querying a node which is not fully synced.
My setup in the console:
npx hardhat node
>Started HTTP and WebSocket JSON-RPC server at http://127.0.0.1:8545/
>Accounts
>========
>...
npx hardhat compile
> Nothing to compile
npx hardhat run scripts/deploy.js --network hardhat
Note: In the deploy.js file, I do a
const list = await contract.getList();
console.log("list", list ); // correctly outputs ["string", "string"]
The method:
mapping(uint256 => address) internal list;
uint256 internal listCount;

function getList() public override view returns (address[] memory) {
    address[] memory assets = new address[](listCount);
    for (uint256 i = 0; i < listCount; i++) {
        assets[i] = list[i];
    }
    return assets;
}
In React App.js:
import Contract_ from './data/abi/Contract_.json'; // Contract_ is a placeholder
var contract = new web3.eth.Contract(Contract_, address_given_on_deploy);
var contractAddress = contract.options.address; // correctly outputs
var list = await contract.methods.getList().call();
console.log("list", list);
As you see, this doesn't return the values from the method. What am I doing wrong here?
In case it's relevant (and it may well be the issue), here's my config:
require("#nomiclabs/hardhat-waffle");
// openzeppelin adds
require("#nomiclabs/hardhat-ethers");
require('#openzeppelin/hardhat-upgrades');
//abi
require('hardhat-abi-exporter');
// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task("accounts", "Prints the list of accounts", async () => {
  const accounts = await ethers.getSigners();
  for (const account of accounts) {
    console.log(account.address);
  }
});
// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more
/**
 * @type import('hardhat/config').HardhatUserConfig
 */
module.exports = {
  networks: {
    hardhat: {
      gas: 12000000,
      blockGasLimit: 0x1fffffffffffff,
      allowUnlimitedContractSize: true,
      timeout: 1800000,
      chainId: 1337
    }
  },
  solidity: {
    compilers: [
      {
        version: "0.8.0",
        settings: {
          optimizer: {
            enabled: true,
            runs: 1000
          }
        }
      },
      {
        version: "0.8.2",
        settings: {
          optimizer: {
            enabled: true,
            runs: 1000
          }
        }
      },
    ],
  },
  abiExporter: {
    path: './frontend/src/data/abi',
    clear: true,
    flat: true,
    only: [],
    spacing: 2
  }
}
I thought maybe I would try ethers.js, since that is what I do my testing in, but I hit the same issue.
For whatever reason, I can "get" the contracts and print the methods that belong to them, but I can't actually call the methods.
Here's my ethers.js attempt, for brevity:
provider = new ethers.providers.Web3Provider(window.ethereum);
if (provider != null) {
  const _contract = new ethers.Contract(address, _Contract, provider);
  var list = await _contract.getList().call();
  console.log("list", list);
}
The error i get from this is:
Error: call revert exception (method="getList()", errorArgs=null, errorName=null, errorSignature=null, reason=null, code=CALL_EXCEPTION, version=abi/5.4.0)
I've tried numerous contracts in the protocol, and it's the same for each.
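(For reference while debugging the ethers.js path: in ethers, read-only contract methods are awaited directly rather than chained with a web3-style .call(); a minimal sketch against the same contract instance would be:)

const provider = new ethers.providers.Web3Provider(window.ethereum);
const contract = new ethers.Contract(address, _Contract, provider);
// ethers decodes and returns the values directly; there is no .call() step
const list = await contract.getList();
console.log("list", list);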

Is it possible to perform an action with `context` on the init of the app?

I'm simply looking for something like this:
app.on('init', async context => {
  ...
})
Basically, I just need to make two calls to the GitHub API, but I'm not sure there is a way to do it without using the API client inside the Context object.
I ended up using probot-scheduler
const createScheduler = require('probot-scheduler')

module.exports = app => {
  createScheduler(app, {
    delay: false
  })
  app.on('schedule.repository', context => {
    // this is called on startup and can access context
  })
}
I tried probot-scheduler, but it didn't exist - perhaps it was removed in an update?
In any case, I managed to do it after lots of digging by using the actual app object - its .auth() method returns a promise resolving to the GitHubAPI interface:
https://probot.github.io/api/latest/classes/application.html#auth
module.exports = app => {
  // app.route() returns an express router we can attach endpoints to
  const router = app.route();
  router.get('/hello-world', async (req, res) => {
    const github = await app.auth();
    const result = await github.repos.listForOrg({ org: 'org name' });
    console.log(result);
  });
}
.auth() takes the ID of the installation if you wish to access private data. If called with no arguments, the client can only retrieve public data.
You can get the installation ID by calling .auth() without parameters and then listInstallations():
const github = await app.auth();
const result = await github.apps.listInstallations();
console.log(result);
You get back an array including the IDs that you can then pass to .auth().
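Putting the two steps together, a minimal sketch (assuming a single installation, and that the response follows the usual octokit { data } envelope):

const appClient = await app.auth();
const { data: installations } = await appClient.apps.listInstallations();
// authenticate as the first installation to access its private data
const installationClient = await app.auth(installations[0].id);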

Node Elasticsearch - Bulk indexing not working - Content-Type header [application/x-ldjson] is not supported

Being new to Elasticsearch, I am exploring it by integrating it with Node, trying to execute the following online git example on Windows:
https://github.com/sitepoint-editors/node-elasticsearch-tutorial
While trying to import the data of 1000 items from data.json, the execution of 'node index.js' fails with the following error.
By enabling the trace, I now see the following from the bulk function as the root cause:
"error": "Content-Type header [application/x-ldjson] is not supported",
"status": 406
I see a change log at https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/changelog.html which says the following:
13.0.0 (Apr 24 2017): bulk and other APIs that send line-delimited JSON bodies now use the Content-Type: application/x-ndjson header #507
Any idea how to resolve this content type issue in index.js?
index.js
(function () {
  'use strict';

  const fs = require('fs');
  const elasticsearch = require('elasticsearch');
  const esClient = new elasticsearch.Client({
    host: 'localhost:9200',
    log: 'error'
  });

  const bulkIndex = function bulkIndex(index, type, data) {
    let bulkBody = [];
    data.forEach(item => {
      bulkBody.push({
        index: {
          _index: index,
          _type: type,
          _id: item.id
        }
      });
      bulkBody.push(item);
    });
    esClient.bulk({body: bulkBody})
      .then(response => {
        console.log(`Inside bulk3...`);
        let errorCount = 0;
        response.items.forEach(item => {
          if (item.index && item.index.error) {
            console.log(++errorCount, item.index.error);
          }
        });
        console.log(`Successfully indexed ${data.length - errorCount} out of ${data.length} items`);
      })
      .catch(console.error);
  };

  // only for testing purposes
  // all calls should be initiated through the module
  const test = function test() {
    const articlesRaw = fs.readFileSync('data.json');
    const articles = JSON.parse(articlesRaw);
    console.log(`${articles.length} items parsed from data file`);
    bulkIndex('library', 'article', articles);
  };

  test();

  module.exports = {
    bulkIndex
  };
}());
My local Windows environment:
Java version 1.8.0_121
Elasticsearch version 6.1.1
Node version v8.9.4
npm version 5.6.0
The bulk function doesn't return a promise. It accepts a callback function as a parameter.
esClient.bulk(
  {body: bulkBody},
  function(err, response) {
    if (err) { console.error(err); return; }
    console.log(`Inside bulk3...`);
    let errorCount = 0;
    response.items.forEach(item => {
      if (item.index && item.index.error) {
        console.log(++errorCount, item.index.error);
      }
    });
    console.log(`Successfully indexed ${data.length - errorCount} out of ${data.length} items`);
  }
)
or use promisify to convert a function accepting an (err, value) => ... style callback into a function that returns a promise. Note that promisify detaches the method from the client, so bind the result back to esClient:
const util = require('util');
const esClientBulk = util.promisify(esClient.bulk).bind(esClient);
esClientBulk({body: bulkBody})
  .then(...)
  .catch(...)
EDIT: Just found out that elasticsearch-js supports both callbacks and promises. So this should not be an issue.
By looking at the package.json of the project you've linked, it uses elasticsearch-js version ^11.0.1, which is an old version that sends bulk uploads with the application/x-ldjson header; that header is not supported by newer Elasticsearch versions. So upgrading elasticsearch-js to a newer version (the current latest is 14.0.0) should fix it.
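A sketch of the upgrade step, assuming the client is installed under its usual package name:

npm install elasticsearch@latest --save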

AngularJS File Upload to Backend Express Server

I am trying to do a file upload using AngularJS with the angular-file-upload library (https://github.com/danialfarid/angular-file-upload).
Here is my code
// =============================== My HTML File ===========================
<input type="file" ng-file-select="onFileSelect($files)">

// =============================== My Controller ==========================
$scope.formObj = {
  name: "Test"
};

var fileToUpload;

$scope.onFileSelect = function (file) {
  fileToUpload = file[0];
};

// POST request to /api/items
$scope.addItem = function() {
  console.log($scope.formObj);
  $scope.upload = $upload.upload({
    url: '/api/items',
    method: 'POST',
    data: { myObj: $scope.formObj },
    file: fileToUpload
  }).success(function(data, status, headers, config) {
    console.log("success");
  });
};
// ================================ My Backend =============================
// This is the function that will receive the POST request to /api/items
exports.create = function(req, res) {
  console.log(req.body); // req.body is just an empty object ==> {}
  // apparently, I found all the data to be in req._readableState.buffer[0]
  // in the form of a buffer
  var buffer = req._readableState.buffer[0];
  // trying to console.log the buffer.toString(), resulting in something similar to this:
  // { name: "Test", image: Object }
  console.log(buffer.toString());
  return res.send(200);
};
So my backend received the formObj with all its properties and values; however, the actual file data itself, whether in the form of a buffer, base64, or whatever, never gets received.
I wonder why. This is my first time working with file uploads, so I don't understand the concepts well.
Please point me in the right direction.
If you are using the latest version of Express, you'll notice that app.use(express.multipart()); is no longer bundled with Express.
So make the following configuration changes in express.js:

var multer = require('multer');
app.use(multer({ dest: './uploads/' }));

After doing this, you will find the data and the file in req.body and req.file respectively.
Hope it helps
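Note that the snippet above uses the pre-1.0 multer API, where multer() returned app-level middleware. With multer 1.x the middleware is applied per route instead; a minimal sketch, assuming the upload field is named 'file':

var multer = require('multer');
var upload = multer({ dest: './uploads/' });

// text fields end up in req.body, the uploaded file in req.file
app.post('/api/items', upload.single('file'), function(req, res) {
  console.log(req.body);
  console.log(req.file);
  res.sendStatus(200);
});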

A solution for streaming JSON using oboe.js in AngularJS?

I'm pretty new to Angular, so maybe I'm asking the impossible, but anyway, here is my challenge.
As our server cannot paginate JSON data, I would like to stream the JSON and add it page by page to the controller's model. The user doesn't have to wait for the entire stream to load, so I refresh the view for every X (pagesize) records.
I found oboe.js for parsing the JSON stream and added it to my project using bower (bower install oboe --save).
I want to update the controller's model during the streaming. I did not use the $q implementation of promises, because only one .resolve(...) is possible and I want multiple pages of data loaded via the stream, so $digest needs to be called with every page. The restful service that is called is /service/tasks/search.
I created a factory with a search function which I call from within the controller:
'use strict';

angular.module('myStreamingApp')
  .factory('Stream', function() {
    return {
      search: function(schema, scope) {
        var loaded = 0;
        var pagesize = 100;
        // JSON streaming parser oboe.js
        oboe({
          url: '/service/' + schema + '/search'
        })
          // process every node which has a schema
          .node('{schema}', function(rec) {
            // push the record to the model data
            scope.data.push(rec);
            loaded++;
            // if there is another page received then refresh the view
            if (loaded % pagesize === 0) {
              scope.$digest();
            }
          })
          .fail(function(err) {
            console.log('streaming error' + (err.thrown ? err.thrown.message : ''));
          })
          .done(function() {
            scope.$digest();
          });
      }
    };
  });
My controller:
'use strict';

angular.module('myStreamingApp')
  .controller('MyCtrl', function($scope, Stream) {
    $scope.data = [];
    Stream.search('tasks', $scope);
  });
It all seems to work. After a while, however, the system gets slow and the HTTP call doesn't terminate after refreshing the browser. The browser (Chrome) also crashes when too many records are loaded.
Maybe I'm on the wrong track, because passing the scope to the factory's search function doesn't "feel" right, and I suspect that calling $digest on that scope is giving me trouble. Any ideas on this subject are welcome, especially if you have an idea for implementing it where the factory (or service) could return a promise and I could use
$scope.data = Stream.search('tasks');
in the controller.
I dug in a little further and came up with the following solution; it might help someone.
The factory (named Stream) has a search function which is passed parameters for the Ajax request and a callback function. The callback is called for every page of data loaded by the stream. The callback function is invoked via a deferred promise, so the scope is updated automatically with every page. To access the search function I use a service (named Search) which initially returns an empty array of data. As the stream progresses, the factory calls the callback function passed by the service, and the page is added to the data.
I can now call the Search service from within a controller and assign the return value to the scope's data array.
The service and the factory:
'use strict';

angular.module('myStreamingApp')
  .service('Search', function(Stream) {
    return function(params) {
      // initialize the data
      var data = [];
      // add the data page by page using a stream
      Stream.search(params, function(page) {
        // a page of records is received.
        // add each record to the data
        _.each(page, function(record) {
          data.push(record);
        });
      });
      return data;
    };
  })
  .factory('Stream', function($q) {
    return {
      // the search function calls the oboe module to get the JSON data in a stream
      search: function(params, callback) {
        // the defer will be resolved immediately
        var defer = $q.defer();
        var promise = defer.promise;
        // counter for the received records
        var counter = 0;
        // I use an arbitrary page size.
        var pagesize = 100;
        // initialize the page of records
        var page = [];
        // call the oboe function to start the stream
        oboe({
          url: '/api/' + params.schema + '/search',
          method: 'GET'
        })
          // once the stream starts we can resolve the defer.
          .start(function() {
            defer.resolve();
          })
          // for every node containing an _id
          .node('{_id}', function(node) {
            // we push the node to the page
            page.push(node);
            counter++;
            // if the pagesize is reached return the page using the promise
            if (counter % pagesize === 0) {
              promise.then(callback(page));
              // initialize the page
              page = [];
            }
          })
          .done(function() {
            // when the stream is done make sure the last page of nodes is returned
            promise.then(callback(page));
          });
        return promise;
      }
    };
  });
Now I can call the service from within a controller and assign the response of the service to the scope:
$scope.mydata = Search({schema: 'tasks'});
Update, August 30, 2014:
I have created an angular-oboe module with the above solution, structured a little more cleanly:
https://github.com/RonB/angular-oboe