Google Cloud Functions - cannot read property 'getApplicationDefault'

I have deployed a Cloud Function to invoke a Dataflow pipeline template and am trying to trigger the function by placing a file in a Cloud Storage bucket.
As Node.js prerequisites I have run:
npm init
npm install --save googleapis
index.js
const google = require('googleapis');

exports.goWithTheDataFlow = function(event, callback) {
  const file = event.data;
  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);
    if (authClient.createScopedRequired && authClient.createScopedRequired()) {
      authClient = authClient.createScoped([
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/userinfo.email'
      ]);
    }
    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);
    dataflow.projects.templates.create({
      projectId: projectId,
      resource: {
        parameters: {
          inputFile: `gs://${file.bucket}/${file.name}`
        },
        jobName: 'cloud-fn-dataflow-test',
        gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });
  callback();
};
Command used to deploy the Cloud Function:
gcloud beta functions deploy goWithTheDataFlow --stage-bucket cf100stage --trigger-bucket cf100
Dataflow (Apache Beam):
I was able to execute the Dataflow template from the console, and below is the path of the template:
'gs://jaison/templates/ApacheBeamTemplate'
The function crashes with the error below:
TypeError: Cannot read property 'getApplicationDefault' of undefined
    at exports.goWithTheDataFlow (/user_code/index.js:11:17)
    at /var/tmp/worker/worker.js:695:16
    at /var/tmp/worker/worker.js:660:9
    at _combinedTickCallback (internal/process/next_tick.js:73:7)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
Looks like I am missing libraries. Not sure how to fix this. Please help.

My Cloud Function works with the changes below.
1. Set GOOGLE_APPLICATION_CREDENTIALS to the service account JSON file:
export GOOGLE_APPLICATION_CREDENTIALS="/path/of/svc/json/file.json"
2. index.js (newer versions of googleapis export a {google} namespace object rather than the module itself, so it has to be destructured):
var {google} = require('googleapis');

exports.TriggerBeam = (event, callback) => {
  const file = event.data;
  const context = event.context;
  console.log(`Event ${context.eventId}`);
  console.log(` Event Type: ${context.eventType}`);
  console.log(` Bucket: ${file.bucket}`);
  console.log(` File: ${file.name}`);
  console.log(` Metageneration: ${file.metageneration}`);
  console.log(` Created: ${file.timeCreated}`);
  console.log(` Updated: ${file.updated}`);
  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);
    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);
    dataflow.projects.templates.create({
      projectId: projectId,
      resource: {
        parameters: {
          inputFile: `gs://${file.bucket}/${file.name}`
        },
        jobName: 'cloud-fn-beam-test',
        gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });
  callback();
};
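Before redeploying, a quick local sanity check (a sketch; it assumes Node.js and the googleapis package are installed locally, and check-adc.js is just a hypothetical file name) that the destructured import exposes google.auth and that the credentials from step 1 resolve to a project:
// check-adc.js - local sanity check, not part of the deployed function
const {google} = require('googleapis');
google.auth.getApplicationDefault(function (err, authClient, projectId) {
  if (err) {
    console.error('Application Default Credentials not found:', err.message);
    return;
  }
  console.log('ADC OK - project:', projectId);
});
Run it with node check-adc.js after exporting GOOGLE_APPLICATION_CREDENTIALS as in step 1; if it prints the project ID, the credentials are being picked up.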

Related

Error uploading file: TypeError: source is not async iterable

Is there an issue with IPFS? Nothing works.
I get this error when running ipfs daemon:
2022-11-05T01:21:38.715+0100 ERROR tcp-tpt tcp/tcp.go:59 failed set keepalive period {"error": "set tcp4 192.168.1.6:4001->34.211.25.159:41218: setsockopt: invalid argument"}
import * as IPFS from 'ipfs-core'

const setURI = async (file, callback) => {
  try {
    const ipfs = await IPFS.create()
    const { cid } = await ipfs.add(file)
    console.info(cid)
    // callback(url)
  } catch (error) {
    console.error('Error uploading file: ', error)
  }
}

export {
  setURI
}
This is a bug in the latest version of Babel: https://github.com/babel/babel/issues/15145
A workaround is to downgrade by adding this to package.json:
"resolutions": {
"#babel/plugin-transform-async-to-generator": "7.17.12"
},
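The "resolutions" field is honored by Yarn. If the project installs with npm instead, a comparable workaround (an assumption on my part, and it requires npm 8.3 or newer) is the "overrides" field in package.json:
"overrides": {
  "@babel/plugin-transform-async-to-generator": "7.17.12"
}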

TypeError: Cannot read properties of undefined (reading 'filename') in multer

I have a very similar problem to this fellow community contributor: How do I produce a multer error message in my Postman? I followed the comments made by other users and it worked! However, when I try to post an image that is a JPG (which I managed to do before the editing), it now fails and states: TypeError: Cannot read property 'filename' of undefined.
// multer.js file - multer is set up successfully
**Please tell me why this error occurs in my code and give me a solution**
const multer = require('multer');

const storage = multer.diskStorage({
  fileSize: 1024*1024*2,
  destination: function (req, file, cb) {
    cb(null, './uploads')
  },
  filename: function (req, file, cb) {
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9)
    cb(null, file.fieldname + '-' + uniqueSuffix)
  }
})

const filter = function (req, file, cb) {
  if (file.mimetype === 'image/jpeg' || file.mimetype === 'image/png') {
    cb(null, true);
  } else {
    cb(new Error('unsupported files'), false)
  }
}

var upload = multer({
  storage: storage,
  limits: {
    fileSize: 1024 * 1024 * 5
  },
  fileFilter : filter
});

module.exports = upload;
//controller.js file
//create function
Here's my logic to create a new user:
exports.create = (req, res, next) => {
  if (!req.body) {
    res.status(400).send({ message: "content cannot be empty !!" })
    return
  }
  let data = {
    name: req.body.name,
    description: req.body.description,
    brand_url: req.body.brand_url,
    image_file: req.body.file.filename // getting error here
  };
  let sql = "INSERT INTO influencer SET ?";
  db.query(sql, data, (err, results) => {
    if (err) throw err;
    console.log('data inserted succesfully')
    res.redirect('/admin');
  });
}
//api.js file
//post API
router.post('/api/create', upload.single('image'), controller.create) // when I send a file it throws back 'undefined filename'
Please make sure you have added enctype="multipart/form-data" to your form:
<form action="/api/create" enctype="multipart/form-data" method="post">
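If the request is built from JavaScript rather than an HTML form, the field name still has to match the one given to upload.single('image'). A minimal sketch using fetch and FormData (the #imageInput selector and the extra name field are hypothetical; /api/create is the route from the question):
// client-side sketch: the file field name ('image') must match upload.single('image')
const formData = new FormData();
formData.append('image', document.querySelector('#imageInput').files[0]);
formData.append('name', 'example influencer'); // other body fields read by the controller
fetch('/api/create', { method: 'POST', body: formData })
  .then((res) => console.log('status:', res.status))
  .catch((err) => console.error(err));
In Postman the equivalent is choosing Body > form-data and naming the file key image.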
I have tested the code and found the problem.
exports.create = (req, res, next) => {
  if (!req.body) {
    res.status(400).send({ message: "content cannot be empty !!" })
    return
  }
  let data = {
    name: req.body.name,
    description: req.body.description,
    brand_url: req.body.brand_url,
    image_file: req.file.filename
  }; // Remove "body", I have tested, it works well.
  let sql = "INSERT INTO influencer SET ?";
  db.query(sql, data, (err, results) => {
    if (err) throw err;
    console.log('data inserted succesfully')
    res.redirect('/admin');
  });
}

req.file.stream is undefined

I'm having trouble with Node 16 and ES6. I'm trying to make an upload file controller, but I'm stuck with req.file.stream, which is undefined.
I'm using multer to handle uploaded files.
The first issue was __dirname being undefined, which I was able to fix with path and new URL().
The error I get with pipeline:
node:internal/process/promises:246
triggerUncaughtException(err, true /* fromPromise */);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "source" argument must be of type function or an instance of Stream, Iterable, or AsyncIterable. Received undefined
my userRoutes.js
import express from "express";
import { signin, signup, logout } from "../Controller/AuthController.js";
import {
  getUsers,
  getUser,
  updateUser,
  deleteUser,
  follow,
  unfollow,
} from "../Controller/UserController.js";
import { upload } from "../Controller/UploadController.js";
import multer from "multer";
const router = express.Router();
// Auth
router.post("/signin", signin);
router.post("/signup", signup);
router.post("/logout", logout);
// users
router.get("/", getUsers);
router.get("/:id", getUser);
router.patch("/:id", updateUser);
router.delete("/:id", deleteUser);
router.patch("/follow/:id", follow);
router.patch("/unfollow/:id", unfollow);
// upload
router.post("/upload", multer().single("file"), upload);
export default router;
And my UploadController.js
import fs from "fs";
import { promisify } from "util";
import stream from "stream";
const pipeline = promisify(stream.pipeline);
// const { uploadErrors } = require("../utils/errors.utils");
import path from "path";

const __dirname = path.dirname(new URL(import.meta.url).pathname);

export const upload = async (req, res) => {
  try {
    // console.log(req.file);
    console.log(__dirname);
    if (
      !req.file.mimetype == "image/jpg" ||
      !req.file.mimetype == "image/png" ||
      !req.file.mimetype == "image/jpeg"
    )
      throw Error("invalid file");
    if (req.file.size > 2818128) throw Error("max size");
  } catch (err) {
    const errors = uploadErrors(err);
    return res.status(201).json({ err });
  }
  const fileName = req.body.name + ".jpg";
  await pipeline(
    req.file.stream,
    fs.createWriteStream(
      `${__dirname}/../client/public/uploads/profil/${fileName}`
    )
  );
  try {
    await User.findByIdAndUpdate(
      req.body.userId,
      { $set: { picture: "./uploads/profil/" + fileName } },
      { new: true, upsert: true, setDefaultsOnInsert: true },
      (err, docs) => {
        if (!err) return res.send(docs);
        else return res.status(500).send({ message: err });
      }
    );
  } catch (err) {
    return res.status(500).send({ message: err });
  }
};
Multer gives you the file as a Buffer, not a Stream. req.file.stream is not a valid property, but req.file.buffer is: https://github.com/expressjs/multer#file-information.
From the look of your code, you're trying to save the file on disk. You can use multer's DiskStorage for that. Create a storage instance and pass it to the multer instance as a configuration:
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, `${__dirname}/../client/public/uploads/profil/`);
  },
  filename: function (req, file, cb) {
    cb(null, req.body.name + '.jpg');
  },
});
// use a different name than the imported `upload` controller so it is not shadowed
const uploadMiddleware = multer({ storage });
router.post('/upload', uploadMiddleware.single('file'), upload);
Have a look at this free Request Parsing in Node.js Guide for working with file uploads in Node.js.
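Alternatively, if you prefer multer's default in-memory handling over DiskStorage, a minimal sketch of writing req.file.buffer yourself (the __dirname workaround and target path are copied from the question; the memoryUpload name is hypothetical):
// UploadController sketch: keep multer's MemoryStorage and write the buffer yourself
import fs from "fs";
import path from "path";
import multer from "multer";

const __dirname = path.dirname(new URL(import.meta.url).pathname);
export const memoryUpload = multer({ storage: multer.memoryStorage() });

// route: router.post("/upload", memoryUpload.single("file"), upload);
export const upload = (req, res) => {
  const fileName = req.body.name + ".jpg";
  const target = `${__dirname}/../client/public/uploads/profil/${fileName}`;
  fs.writeFile(target, req.file.buffer, (err) => {
    if (err) return res.status(500).send({ message: err.message });
    return res.status(201).send({ picture: "./uploads/profil/" + fileName });
  });
};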
If you want to use req.file.stream, you will need to install this version of multer:
npm install --save multer@^2.0.0-rc.1
and your code will work perfectly; just change req.file.mimetype to req.file.detectedMimeType.
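Under that release candidate, the mime check in the controller would then look roughly like this (a sketch; note that the original chain of ! comparisons never actually rejects anything, so an includes() test is used instead):
// with multer@^2.0.0-rc.1 the detected type is exposed as req.file.detectedMimeType
const allowedTypes = ["image/jpg", "image/jpeg", "image/png"];
if (!allowedTypes.includes(req.file.detectedMimeType)) throw Error("invalid file");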

Cloud Function to Trigger DataPrep Dataflow Job

I have a small pipeline I'm trying to execute:
1. File placed into GCS bucket
2. Cloud Function triggers Dataflow job when the file is placed in the GCS bucket (not working)
3. Writes to BigQuery table (this part is working)
I've created a Dataflow job through Dataprep as it has a nice UI to do all my transformations before writing to a BigQuery table (writing to BigQuery works fine), and the Cloud Function triggers when a file is uploaded to the GCS bucket. However, the Cloud Function doesn't trigger the Dataflow job (which I wrote in Dataprep).
Please have a look at my sample Cloud Function code below; I'd appreciate any pointers as to why the Dataflow job is not triggering.
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} The callback function.
 */
exports.processFile = (event, callback) => {
  console.log('Processing file: ' + event.data.name);
  callback();

  const google = require('googleapis');

  exports.CF_GCStoDataFlow_v2 = function(event, callback) {
    const file = event.data;
    if (file.resourceState === 'exists' && file.name) {
      google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
          throw err;
        }
        if (authClient.createScopedRequired && authClient.createScopedRequired()) {
          authClient = authClient.createScoped([
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/userinfo.email'
          ]);
        }
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        dataflow.projects.templates.create({
          projectId: projectId,
          resource: {
            parameters: {
              inputFile: `gs://${file.bucket}/${file.name}`,
              outputFile: `gs://${file.bucket}/${file.name}`
            },
            jobName: 'cloud-dataprep-csvtobq-v2-281345',
            gcsPath: 'gs://mygcstest-pipeline-staging/temp/'
          }
        }, function(err, response) {
          if (err) {
            console.error("problem running dataflow template, error was: ", err);
          }
          console.log("Dataflow template response: ", response);
          callback();
        });
      });
    }
  };
};
This snippet may help. It uses a different method of the Dataflow API (launch), and it worked for me. Be aware that you need to specify the template's URL, and also check the metadata file (you can find it in the same directory as the template when it is executed through the Dataprep interface) to make sure you are including the right parameters.
dataflow.projects.templates.launch({
  projectId: projectId,
  location: location,
  gcsPath: jobTemplateUrl,
  resource: {
    parameters: {
      inputLocations: `{"location1": "gs://${file.bucket}/${file.name}"}`,
      outputLocations: `{"location1": "gs://${destination.bucket}/${destination.name}"}`,
    },
    environment: {
      tempLocation: `gs://${destination.bucket}/${destination.tempFolder}`,
      zone: "us-central1-f"
    },
    jobName: 'my-job-name',
  }
});
Have you submitted your Dataproc job? Has it started running?
The documentation below can give you some ideas to get started:
https://cloud.google.com/dataproc/docs/concepts/jobs/life-of-a-job
Looks like you are putting CF_GCStoDataFlow_v2 inside processFile, so the Dataflow part of the code is not executing.
Your function should look like this:
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} The callback function.
 */
exports.CF_GCStoDataFlow_v2 = (event, callback) => {
  const google = require('googleapis');
  const file = event.data; // needed so `file` is defined below
  if (file.resourceState === 'exists' && file.name) {
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }
      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }
      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
      dataflow.projects.templates.create({
        projectId: projectId,
        resource: {
          parameters: {
            inputFile: `gs://${file.bucket}/${file.name}`,
            outputFile: `gs://${file.bucket}/${file.name}`
          },
          jobName: '<JOB_NAME>',
          gcsPath: '<BUCKET_NAME>'
        }
      }, function(err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  }
  callback();
};
Make sure you change the value under “Function to execute” to CF_GCStoDataFlow_v2
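If you deploy from the command line rather than the console, the equivalent of "Function to execute" is the --entry-point flag (a sketch; bucket names are placeholders):
gcloud beta functions deploy CF_GCStoDataFlow_v2 \
  --entry-point CF_GCStoDataFlow_v2 \
  --stage-bucket <STAGE_BUCKET> \
  --trigger-bucket <TRIGGER_BUCKET>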

How to Add API Key to Usage Plan in AWS API Gateway

I have a problem with creating an API Key associated with a Usage Plan in AWS API Gateway (using the AWS SDK for Node.js).
In the AWS Console you can attach an API Key to a Usage Plan via a button in the UI. However, I could not find a similar function in the AWS SDK documentation.
This code does the magic:
var params = {
  keyId: 'STRING_VALUE', /* required */
  keyType: 'STRING_VALUE', /* required */
  usagePlanId: 'STRING_VALUE' /* required */
};
apigateway.createUsagePlanKey(params, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data); // successful response
});
What I was missing was that keyType must be "API_KEY".
This is a full example of creating an API Key and assigning it to a usage plan.
async function createApiKey(apikey_name, usage_plan="free") {
  const usage_plans = {
    free: "kotc0f", // usage plan IDs
    basic: "ju5fea"
  };
  if (!usage_plan || !(usage_plan in usage_plans)) {
    console.log(usage_plan + " usage plan does not exist");
    return false;
  }
  var params = {
    name: apikey_name,
    description: "Created via API on " + (new Date).toISOString(),
    enabled: true,
    generateDistinctId: true
  };
  let api_key = await new Promise((resolve) => {
    apigateway.createApiKey(params, function (err, data) {
      if (err) {
        console.log("ERROR", err, err.stack); // an error occurred
        resolve(false);
      } else {
        resolve(data);
      }
    });
  });
  if (!api_key) return false;
  params = {
    keyId: api_key.id,
    keyType: "API_KEY",
    usagePlanId: usage_plans[usage_plan]
  };
  await new Promise((resolve) => {
    apigateway.createUsagePlanKey(params, function(err, data) {
      if (err) {
        console.log(err, err.stack); // an error occurred
        resolve(false);
      } else {
        resolve(data);
      }
    });
  });
  return api_key;
}
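A usage sketch (assuming apigateway has already been constructed, e.g. with new AWS.APIGateway() from the AWS SDK for Node.js, and that this runs inside an async function; the key name is hypothetical):
// hypothetical caller: create a key on the "basic" plan and log it
const apiKey = await createApiKey("customer-1234-key", "basic");
if (apiKey) {
  console.log("API key id:", apiKey.id);
  console.log("API key value:", apiKey.value); // the generated key string
}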