Download images from Google Drive using the API in Node.js

Hi @iansedano, I wanted the code for achieving this. Below is my code:
// [START drive_quickstart]
const fs = require('fs');
const path = require('path');
const readline = require('readline');
const {google} = require('googleapis');
// If modifying these scopes, delete token.json.
const SCOPES = ['https://www.googleapis.com/auth/drive'];
// The file token.json stores the user's access and refresh tokens, and is
// created automatically when the authorization flow completes for the first
// time.
const TOKEN_PATH = 'token.json';
// Load client secrets from a local file.
let credPath = path.join(__dirname, '../../credentials.json');
fs.readFile(credPath, (err, content) => {
  if (err) return console.log('Error loading client secret file:', err);
  // Authorize a client with credentials, then call the Google Drive API.
  authorize(JSON.parse(content), getFiles);
});
/**
 * Create an OAuth2 client with the given credentials, and then execute the
 * given callback function.
 * @param {Object} credentials The authorization client credentials.
 * @param {function} callback The callback to call with the authorized client.
 */
function authorize(credentials, callback) {
  const {client_secret, client_id, redirect_uris} = credentials.installed;
  const oAuth2Client = new google.auth.OAuth2(
      client_id, client_secret, redirect_uris[0]);
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, (err, token) => {
    if (err) return getAccessToken(oAuth2Client, callback);
    oAuth2Client.setCredentials(JSON.parse(token));
    callback(oAuth2Client);
  });
}
/**
 * Get and store new token after prompting for user authorization, and then
 * execute the given callback with the authorized OAuth2 client.
 * @param {google.auth.OAuth2} oAuth2Client The OAuth2 client to get token for.
 * @param {getEventsCallback} callback The callback for the authorized client.
 */
function getAccessToken(oAuth2Client, callback) {
  const authUrl = oAuth2Client.generateAuthUrl({
    access_type: 'offline',
    scope: SCOPES,
  });
  console.log('Authorize this app by visiting this url:', authUrl);
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  rl.question('Enter the code from that page here: ', (code) => {
    rl.close();
    oAuth2Client.getToken(code, (err, token) => {
      if (err) return console.error('Error retrieving access token', err);
      oAuth2Client.setCredentials(token);
      // Store the token to disk for later program executions
      fs.writeFile(TOKEN_PATH, JSON.stringify(token), (err) => {
        if (err) return console.error(err);
        console.log('Token stored to', TOKEN_PATH);
      });
      callback(oAuth2Client);
    });
  });
}
/**
 * Downloads a file from Drive to the local images directory.
 * @param {google.auth.OAuth2} auth An authorized OAuth2 client.
 */
function getFiles(auth) {
  const drive = google.drive({version: 'v3', auth});
  var filePath = path.join(__dirname, './../../images/photo.png');
  //var filePath = path.join(__dirname, './images/' + fileName + '.png');
  var dest = fs.createWriteStream(filePath);
  drive.files.get({fileId: '1hEKDGM4OoLQhhpSpqSxaMkrz0WmByQ0B', alt: 'media'}, {responseType: 'stream'},
      function(err, res) {
        if (err) return console.error('Error fetching file', err);
        res.data
            .on('end', () => {
              console.log('Done');
            })
            .on('error', err => {
              console.log('Error', err);
            })
            .pipe(dest);
      }
  );
}
// [END drive_quickstart]
module.exports = {
  SCOPES,
  getFiles,
};
Can anyone help me download the file from Google Drive using the API in Node.js?
I have enabled the Google Drive API and created credentials.json from the OAuth2 flow.
I need the code to download the image file using the file id. Please note that there is no web application involved here; I'm creating a test script using CodeceptJS to download the image from Google Drive and upload it in another application.
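For reference, here is a minimal sketch of just the download step, assuming you already hold an authorized OAuth2 client and the fs/google requires from the snippet above (the fileId and destination path are placeholders, not the OP's values):
function downloadFile(auth, fileId, destPath) {
  const drive = google.drive({version: 'v3', auth});
  return drive.files
    .get({fileId, alt: 'media'}, {responseType: 'stream'})
    .then(res => new Promise((resolve, reject) => {
      const dest = fs.createWriteStream(destPath);
      res.data.on('error', reject).pipe(dest);
      dest.on('finish', () => resolve(destPath));
      dest.on('error', reject);
    }));
}
A CodeceptJS helper could then await downloadFile(client, 'FILE_ID', './images/photo.png') before uploading the image to the other application.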

Related

Restify: socket hangup error when copying a file and querying a database using a promise chain

I am using the restify framework to build a small app that copies an uploaded file from its temporary location to a permanent location and then inserts that new location into a MySQL database. However, when attempting to copy the file and then run the promisified query, the system throws a silent error that is not caught by the promise chain, causing a 502 error on the web server end. A minimal working example is below. This example has been tested and fails out of the gate.
If one of the steps in the process is removed (copying the file or storing the string in the database), the silent error disappears and the API response is sent. However, both steps are needed for later file retrieval.
Main Restify File
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable options response in restify -- this is so stupid!!
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // copy file
  return fs.promises.copyFile(temporaryFile, permanentLocation)
    // insert into database
    .then(() => database.query(
      `insert into Specimen (
        CollectorId,
        HumanReadableId,
        FileLocation
      ) values (
        1,
        'AAA004',
        '${permanentLocation}'
      )`
    ))
    .then(() => {
      console.log('success!!!')
      return res.send('success!')
    })
    .catch(error => {
      console.error(error)
      return res.send(error);
    });
});
./lib/database.js
'use strict';
const mysql = require('mysql2');
class Database {
  constructor(host, port, name, user, pass) {
    this.connection = this.connect(host, port, name, user, pass);
    this.query = this.query.bind(this);
  }
  /**
   * Connects to a MySQL-compatible database, returning the connection object for later use
   * @param {String} host The host of the database connection
   * @param {Number} port The port for connecting to the database
   * @param {String} name The name of the database to connect to
   * @param {String} user The user name for the database
   * @param {String} pass The password for the database user
   * @return {Object} The database connection object
   */
  connect(host, port, name, user, pass) {
    let connection = mysql.createPool({
      connectionLimit: 20,
      host: host,
      port: port,
      user: user,
      password: pass,
      database: name,
      // debug: true
    });
    connection.on('error', err => console.error(err));
    return connection;
  }
  /**
   * Promisifies database queries for easier handling
   * @param {String} queryString String representing a database query
   * @return {Promise} The results of the query
   */
  query(queryString) {
    // console.log('querying database');
    return new Promise((resolve, reject) => {
      // console.log('query promise before query, resolve', resolve);
      // console.log('query promise before query, reject', reject);
      // console.log('query string:', queryString)
      this.connection.query(queryString, (error, results, fields) => {
        console.log('query callback', queryString);
        console.error('query error', error, queryString);
        if (error) {
          // console.error('query error', error);
          reject(error);
        } else {
          // console.log('query results', results);
          resolve(results);
        }
      });
    });
  }
}
module.exports = Database;
./testfile.js (used to quickly query the restify API)
'use strict';
const fs = require('fs');
const request = require('request');
let req = request.post({
  url: 'https://api.databugs.net/test',
}, (error, res, addInsectBody) => {
  if (error) {
    console.error(error);
  } else {
    console.log('addInsectBody:', addInsectBody);
  }
});
let form = req.form();
form.append('file', fs.createReadStream('butterfly.jpg'), {
  filename: 'butterfly.jpg',
  contentType: 'multipart/form-data'
});
If the request is made to the localhost, then an 'ECONNRESET' error is thrown as shown below:
Error: socket hang up
at connResetException (internal/errors.js:570:14)
at Socket.socketOnEnd (_http_client.js:440:23)
at Socket.emit (events.js:215:7)
at endReadableNT (_stream_readable.js:1183:12)
at processTicksAndRejections (internal/process/task_queues.js:80:21) {
code: 'ECONNRESET'
}
This error is only thrown if both the database query and the file I/O are present in the promise chain. Additionally, the error does not occur if the database request is made first with the file I/O occurring second; however, another rapid request to the server will immediately lead to the 'ECONNRESET' error.
I feel as though I should edit this answer, despite the solution revealing a rookie mistake, in the hope that it may help someone else. I will keep the previous answer below for full transparency, but please note that it is incorrect.
Correct Answer
TL;DR
PM2 restarted the NodeJS service with each new file submitted to and saved by the API. The fix: tell PM2 to ignore the directory that stores the API's files. See this answer
Long Answer
While the OP did not mention it, my setup utilized PM2 as the NodeJS service manager for the application, and I had turned on the 'watch & reload' feature that restarted the service with each file change. Unfortunately, I had forgotten to instruct PM2 to ignore file changes in the child directory storing new files submitted through the API. As a result, each new file submitted into the API caused the service to reload. If more instructions remained to be executed after storing the file, they were terminated as PM2 restarted the service. The 502 gateway error was a simple result of the NodeJS service becoming temporarily unavailable during this time.
Changing the database transactions to occur first (as incorrectly described as a solution below) simply ensured that the service restart occurred at the very end when no other instructions were pending.
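For illustration, a minimal PM2 ecosystem file with watch enabled but the upload directory excluded might look like this (the app name, script, and directory are hypothetical stand-ins, not the actual values from this setup):
// ecosystem.config.js -- hypothetical names, adjust to your app
module.exports = {
  apps: [{
    name: 'insect-app',
    script: './server.js',
    watch: true,
    // don't restart the service when uploaded files land here
    ignore_watch: ['permanent_location', 'node_modules'],
  }],
};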
Previous Incorrect Answer
The only solution that I had found thus far was to switch the file I/O and the database query so that the file I/O operation came last. Additionally, changing the file I/O operation to rename rather than copy the file prevented rapidly successive API queries from throwing the same error (having a database query come rapidly after any file I/O operation other than a rename seemed to be the problem). Sadly, I did not have a reasonable explanation for the socket hang up in the OP, but below is the code from the OP, modified to make it functional.
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable options response in restify -- this is so stupid!!
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // copy file
  // insert into database
  return database.query(
    `insert into Specimen (
      CollectorId,
      HumanReadableId,
      FileLocation
    ) values (
      1,
      'AAA004',
      '${permanentLocation}'
    )`
  )
  .then(() => fs.promises.rename(temporaryFile, permanentLocation))
  .then(() => {
    console.log('success!!!')
    return res.send('success!')
  })
  .catch(error => {
    console.error(error)
    return res.send(error);
  });
});
You did not handle the database promise with then and catch:
Main Restify File
const restify = require('restify');
const corsMiddleware = require('restify-cors-middleware');
const cookieParser = require('restify-cookies');
const DataBugsDbCredentials = require('./config/config').appdb;
const fs = require('fs');
const { host, port, name, user, pass } = DataBugsDbCredentials;
const database = new (require('./lib/database'))(host, port, name, user, pass);
const server = restify.createServer({
  name: 'insect app'
});
// enable options response in restify -- this is so stupid!!
const cors = corsMiddleware({});
server.pre(cors.preflight);
server.use(cors.actual);
// set query and body parsing for access to this information on requests
server.use(restify.plugins.acceptParser(server.acceptable));
server.use(restify.plugins.queryParser({ mapParams: true }));
server.use(restify.plugins.bodyParser({ mapParams: true }));
server.use(cookieParser.parse);
server.post('/test', (req, res, next) => {
  const { files } = req;
  let temporaryFile = files['file'].path;
  let permanentLocation = '/srv/www/domain.com/permanent_location';
  // copy file
  return fs.promises.copyFile(temporaryFile, permanentLocation)
    // insert into database
    .then(() => {
      // Your database class instance query method returns a promise
      return database.query(
        `insert into Specimen (
          CollectorId,
          HumanReadableId,
          FileLocation
        ) values (
          1,
          'AAA004',
          '${permanentLocation}'
        )`
      ).then(() => {
        console.log('success!!!')
        return res.send('success!')
      })
      .catch(error => {
        console.error('Inner database promise error', error)
        return res.send(error);
      });
    }).catch(error => {
      console.error('Outer fs.copyfile promise error', error)
      return res.send(error);
    })
});
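A flatter variant is to return the inner promise so that a single chain handles both failure modes; a sketch using the same names as above, with the string value quoted so the SQL is valid:
server.post('/test', (req, res, next) => {
  const temporaryFile = req.files['file'].path;
  const permanentLocation = '/srv/www/domain.com/permanent_location';
  return fs.promises.copyFile(temporaryFile, permanentLocation)
    .then(() => database.query(
      `insert into Specimen (CollectorId, HumanReadableId, FileLocation)
       values (1, 'AAA004', '${permanentLocation}')`
    ))
    .then(() => res.send('success!'))
    .catch(error => {
      console.error(error);
      return res.send(error);
    });
});
Either way, a parameterized query would be safer than string interpolation here.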

Node Function not returning JSON object

This is my Library function file named verifyToken.js
require('dotenv').config();
const CognitoExpress = require("cognito-express");
//Initializing CognitoExpress constructor
const cognitoExpress = new CognitoExpress({
  cognitoUserPoolId: process.env.USER_POOL_ID, // User Pool id (created via Console->Cognito Manage User Pool)
  region: process.env.REGION, //User Pool Region
  tokenUse: "id", //Possible Values: access | id
  tokenExpiration: 3600000 //Up to default expiration of 1 hour (3600000 ms)
});
//Our middleware that authenticates all APIs under our 'authenticatedRoute' Router
module.exports = function() {
  this.verifyToken = function(token, req, res) {
    //I'm passing in the id token in header under key Token
    let TokenFromClient = token;
    //Fail if token not present in header.
    if (!TokenFromClient) return "Token missing from header";
    cognitoExpress.validate(TokenFromClient, function(err, response) {
      //If API is not authenticated, return 401 with error message.
      if (err) {
        return res.send({success: false, msg: "Token Authentication Error", Error: err});
      }
      //Else API has been authenticated. Proceed.
      return res.send({success: true, msg: "Token Authentication result", result: response});
    });
  };
}
I am calling this function from the controller:
var verify_tokenController = {};
//Verify AWS token received through client header by calling verifyToken Library function
verify_tokenController.verify = function(req, res) {
  //Import verifyToken Library function
  require('../library/verifyToken')();
  var auth = verifyToken(req.header('Token'), req, res);
  console.log(auth);
};
module.exports = verify_tokenController;
But I always receive undefined at the console. How do I return err or response back to the calling controller? Both of the return values are JSON objects.
It is because cognitoExpress.validate is an async operation: it kicks off, and its callback runs later. But by the time all of that happens, your line
console.log(auth); has already been hit, and you see undefined because you are not actually returning anything from the verifyToken method at that point.
This is how JavaScript and async operations work.
You could use the callback approach, but that's not what I would recommend. So I'm modifying your code with promises; see if it works for you.
Your verifyToken.js should look as follows:
require('dotenv').config();
const CognitoExpress = require("cognito-express");
//Initializing CognitoExpress constructor
const cognitoExpress = new CognitoExpress({
  cognitoUserPoolId: process.env.USER_POOL_ID, // User Pool id (created via Console->Cognito Manage User Pool)
  region: process.env.REGION, //User Pool Region
  tokenUse: "id", //Possible Values: access | id
  tokenExpiration: 3600000 //Up to default expiration of 1 hour (3600000 ms)
});
//Our middleware that authenticates all APIs under our 'authenticatedRoute' Router
module.exports = function() {
  this.verifyToken = function(token, req, res) {
    return new Promise(function(resolve, reject) {
      //I'm passing in the id token in header under key Token
      let TokenFromClient = token;
      //Fail (reject) if token not present in header.
      if (!TokenFromClient) return reject(new Error("Token missing from header"));
      cognitoExpress.validate(TokenFromClient, function(err, response) {
        //If API is not authenticated, reject with the error.
        if (err) {
          reject(err);
        } else {
          //Else API has been authenticated. Proceed.
          resolve(response);
        }
      });
    });
  };
}
Your controller should look as follows:
var verify_tokenController = {};
//Verify AWS token received through client header by calling verifyToken Library function
verify_tokenController.verify = function(req, res) {
  //Import verifyToken Library function
  require('../library/verifyToken')();
  verifyToken(req.header('Token'), req, res)
    .then(function(response) {
      console.log(response, 'response');
      res.send({success: true, msg: "Token Authentication result", result: response});
    })
    .catch(function(err) {
      res.send({success: false, msg: "Token Authentication Error", Error: err});
    });
};
module.exports = verify_tokenController;
Try the above solution.
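If your Node version supports async/await, the same controller can be written more directly (same behavior, just different syntax):
verify_tokenController.verify = async function(req, res) {
  require('../library/verifyToken')();
  try {
    const response = await verifyToken(req.header('Token'), req, res);
    res.send({success: true, msg: "Token Authentication result", result: response});
  } catch (err) {
    res.send({success: false, msg: "Token Authentication Error", Error: err});
  }
};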

From ES2018 async/await to ES2015 Promises ... timeout

I am trying to convert an ES2018 async function into an ES2015 (ES6) function, but I get a timeout. I guess my ES2015 version is wrong... but where?
ES2018 version
async function connectGoogleAPI () {
  // Create a new JWT client using the key file downloaded from the Google Developer Console
  const client = await google.auth.getClient({
    keyFile: path.join(__dirname, 'service-key.json'),
    scopes: 'https://www.googleapis.com/auth/drive.readonly'
  });
  // Obtain a new drive client, making sure you pass along the auth client
  const drive = google.drive({
    version: 'v2',
    auth: client
  });
  // Make an authorized request to list Drive files.
  const res = await drive.files.list();
  console.log(res.data);
  return res.data;
}
ES2015 version w/Promise
function connectGoogleAPI () {
  return new Promise((resolve, reject) => {
    const authClient = google.auth.getClient({
      keyFile: path.join(__dirname, 'service-key.json'),
      scopes: 'https://www.googleapis.com/auth/drive.readonly'
    });
    google.drive({
      version: 'v2',
      auth: authClient
    }), (err, response) => {
      if(err) {
        reject(err);
      } else {
        resolve(response);
      }
    }
  });
}
You haven't translated the await of getClient. Remember, await = then (roughly). You're also falling prey to the promise creation anti-pattern: when you already have a promise (from getClient), you almost never need to use new Promise. Just use then.
Here's an example with each await converted into a then, using the chain for the subsequent operations:
function connectGoogleAPI () {
  // Create a new JWT client using the key file downloaded from the Google Developer Console
  return google.auth.getClient({
    keyFile: path.join(__dirname, 'service-key.json'),
    scopes: 'https://www.googleapis.com/auth/drive.readonly'
  }).then(client => {
    // Obtain a new drive client, making sure you pass along the auth client
    const drive = google.drive({
      version: 'v2',
      auth: client
    });
    // Make an authorized request to list Drive files.
    return drive.files.list();
  }).then(res => {
    console.log(res.data);
    return res.data;
  });
}
That last part can be just
}).then(res => res.data);
...if you remove the console.log. (Or we could abuse the comma operator.)
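For the curious, that comma-operator version (terse, but harder to read) would be:
}).then(res => (console.log(res.data), res.data));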
Notes:
Each await needs to become a then handler (there were two in the original, awaiting getClient and drive.files.list)
In a then handler, if you have to wait for another promise (such as the one from drive.files.list) you typically return it from the handler, and then use another handler to handle that result (which is why I have return drive.files.list() and then a separate handler for converting res to res.data)
Re that second point: Sometimes nesting is appropriate, such as when you need to combine the result with some intermediate value you only have within your then handler. (For instance, if we wanted to combine res.data with client.) But generally, prefer not to nest.
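As a sketch of that nested case (hypothetical, purely to illustrate keeping client in scope alongside the listing result):
return google.auth.getClient({ /* ...same options as above... */ }).then(client => {
  const drive = google.drive({version: 'v2', auth: client});
  // nest here because we still need `client` together with the result
  return drive.files.list().then(res => ({client, data: res.data}));
});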

Cloud Function to Trigger DataPrep Dataflow Job

I have a small pipeline I'm trying to execute:
1. File placed into GCS bucket
2. Cloud Function triggers Dataflow job when file is placed in GCS bucket (not working)
3. Writes to BigQuery table (this part working)
I've created a Dataflow job through Dataprep, as it has a nice UI to do all my transformations before writing to a BigQuery table (writing to BigQuery works fine), and the Cloud Function triggers when a file is uploaded to the GCS bucket. However, the Cloud Function doesn't trigger the Dataflow job (which I wrote in Dataprep).
Please have a look at my sample Cloud Function code below; any pointers as to why the Dataflow job is not triggering would be appreciated.
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
exports.processFile = (event, callback) => {
  console.log('Processing file: ' + event.data.name);
  callback();
  const google = require('googleapis');
  exports.CF_GCStoDataFlow_v2 = function(event, callback) {
    const file = event.data;
    if (file.resourceState === 'exists' && file.name) {
      google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
          throw err;
        }
        if (authClient.createScopedRequired && authClient.createScopedRequired()) {
          authClient = authClient.createScoped([
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/userinfo.email'
          ]);
        }
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        dataflow.projects.templates.create({
          projectId: projectId,
          resource: {
            parameters: {
              inputFile: `gs://${file.bucket}/${file.name}`,
              outputFile: `gs://${file.bucket}/${file.name}`
            },
            jobName: 'cloud-dataprep-csvtobq-v2-281345',
            gcsPath: 'gs://mygcstest-pipeline-staging/temp/'
          }
        }, function(err, response) {
          if (err) {
            console.error("problem running dataflow template, error was: ", err);
          }
          console.log("Dataflow template response: ", response);
          callback();
        });
      });
    }
  };
};
This snippet may help. It uses a different method of the Dataflow API (launch), which worked for me. Be aware that you need to specify the template's URL, and also check the metadata file (you can find it in the same directory as the template when executed through the Dataprep interface) to make sure you are including the right parameters.
dataflow.projects.templates.launch({
  projectId: projectId,
  location: location,
  gcsPath: jobTemplateUrl,
  resource: {
    parameters: {
      inputLocations: `{"location1": "gs://${file.bucket}/${file.name}"}`,
      outputLocations: `{"location1": "gs://${destination.bucket}/${destination.name}"}`,
    },
    environment: {
      tempLocation: `gs://${destination.bucket}/${destination.tempFolder}`,
      zone: "us-central1-f"
    },
    jobName: 'my-job-name',
  }
}, function(err, response) {
  // handle the API response / errors here
});
Have you submitted your Dataproc job? Has it started running?
The documentation below can give you some ideas to get started!
https://cloud.google.com/dataproc/docs/concepts/jobs/life-of-a-job
Looks like you are putting CF_GCStoDataFlow_v2 inside processFile, so the Dataflow part of the code is not executing.
Your function should look like this:
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
exports.CF_GCStoDataFlow_v2 = (event, callback) => {
  const google = require('googleapis');
  const file = event.data;
  if (file.resourceState === 'exists' && file.name) {
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }
      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }
      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
      dataflow.projects.templates.create({
        projectId: projectId,
        resource: {
          parameters: {
            inputFile: `gs://${file.bucket}/${file.name}`,
            outputFile: `gs://${file.bucket}/${file.name}`
          },
          jobName: '<JOB_NAME>',
          gcsPath: '<BUCKET_NAME>'
        }
      }, function(err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  } else {
    callback();
  }
};
Make sure you change the value under “Function to execute” to CF_GCStoDataFlow_v2
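If you deploy from the command line rather than through the console, the equivalent of "Function to execute" is the --entry-point flag; a sketch with placeholder bucket and runtime values:
gcloud functions deploy CF_GCStoDataFlow_v2 --runtime nodejs10 --trigger-bucket <BUCKET_NAME> --entry-point CF_GCStoDataFlow_v2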

How to upload image file via HTTP Google Cloud Function?

I have read tutorials on how to upload an image to a bucket and do post-processing via a background function. But my requirement is to upload the image, do the post-processing, and return the result immediately via an HTTP function. Please let me know whether this is the correct way to do it, as I didn't find much material online about this. Here is how I went about it:
HTTP Cloud function:
exports.uploadImage = function (req, res) {
  var file = req.body.file;
  uploadSomewhere(file)();
  // < post-processing code which is working fine >
};
UI form:
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7/jquery.js"></script>
<script src="http://malsup.github.com/jquery.form.js"></script>
<form id="myForm" action="<cloud_function_url>/uploadImage" method="post">
<label for="file">Choose file to upload</label>
<input type="file" id="file" name="file" multiple>
<input type="submit" value="Submit" />
</form>
<script>
$(document).ready(function() {
  $('#myForm').ajaxForm(function() {
  });
});
</script>
The problem is, after I deployed the function, when I upload the image from the folder where the function is present, the image gets uploaded. But if I upload the image from any other location, it returns this error:
Error: Error in uploading image file .... - Error: ENOENT: no such file or directory, open '....'
Please let me know what I am doing wrong, or if you need more information.
There is sample code for uploading files directly to Cloud Functions here: https://cloud.google.com/functions/docs/writing/http#multipart_data_and_file_uploads, but be aware there is a file size limit when using this approach (10MB). If you have files larger than that, I recommend you use the Cloud Storage approach and use something like the Firebase Realtime Database to manage the state on the client.
So, you generate the signed upload URL using a Cloud Function, then use the RTDB to track progress for the client. Return the URL and a reference to the location in the DB where you're going to track progress. The client would watch this location for updates. The client then uploads the file to Cloud Storage and the second function is triggered. After post processing it updates the RTDB which pushes the update down to the client. From the client's perspective this is all synchronous, but it's actually a series of async operations with state coalescing in the database.
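A sketch of the signed-URL half of that flow with @google-cloud/storage (the bucket name, object name, content type, and expiry are placeholders):
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

function getUploadUrl(bucketName, objectName) {
  return storage
    .bucket(bucketName)
    .file(objectName)
    .getSignedUrl({
      action: 'write',
      expires: Date.now() + 15 * 60 * 1000, // 15 minutes
      contentType: 'image/png',
    })
    .then(([url]) => url); // getSignedUrl resolves with a one-element array
}
The function would return this URL plus a database path such as /uploads/<id>/status (a hypothetical layout), and the client would PUT the file to the URL while watching that path for the post-processing result.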
Here's the sample code for inline updates taken from the docs if you're OK with file sizes < 10MB:
/**
 * Parses a 'multipart/form-data' upload request
 *
 * @param {Object} req Cloud Function request context.
 * @param {Object} res Cloud Function response context.
 */
const path = require('path');
const os = require('os');
const fs = require('fs');
// Node.js doesn't have a built-in multipart/form-data parsing library.
// Instead, we can use the 'busboy' library from NPM to parse these requests.
const Busboy = require('busboy');
exports.uploadFile = (req, res) => {
  if (req.method === 'POST') {
    const busboy = new Busboy({ headers: req.headers });
    const tmpdir = os.tmpdir();
    // This object will accumulate all the fields, keyed by their name
    const fields = {};
    // This object will accumulate all the uploaded files, keyed by their name.
    const uploads = {};
    // This code will process each non-file field in the form.
    busboy.on('field', (fieldname, val) => {
      // TODO(developer): Process submitted field values here
      console.log(`Processed field ${fieldname}: ${val}.`);
      fields[fieldname] = val;
    });
    let fileWrites = [];
    // This code will process each file uploaded.
    busboy.on('file', (fieldname, file, filename) => {
      // Note: os.tmpdir() points to an in-memory file system on GCF
      // Thus, any files in it must fit in the instance's memory.
      console.log(`Processed file ${filename}`);
      const filepath = path.join(tmpdir, filename);
      uploads[fieldname] = filepath;
      const writeStream = fs.createWriteStream(filepath);
      file.pipe(writeStream);
      // File was processed by Busboy; wait for it to be written to disk.
      const promise = new Promise((resolve, reject) => {
        file.on('end', () => {
          writeStream.end();
        });
        writeStream.on('finish', resolve);
        writeStream.on('error', reject);
      });
      fileWrites.push(promise);
    });
    // Triggered once all uploaded files are processed by Busboy.
    // We still need to wait for the disk writes (saves) to complete.
    busboy.on('finish', () => {
      Promise.all(fileWrites)
        .then(() => {
          // TODO(developer): Process saved files here
          for (const name in uploads) {
            const file = uploads[name];
            fs.unlinkSync(file);
          }
          res.send();
        });
    });
    busboy.end(req.rawBody);
  } else {
    // Return a "method not allowed" error
    res.status(405).end();
  }
};
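To exercise the deployed function, a quick test client in the same style as testfile.js from the earlier question might look like this (the endpoint URL is a placeholder for your actual function URL):
const fs = require('fs');
const request = require('request');
const req = request.post(
  'https://REGION-PROJECT.cloudfunctions.net/uploadFile', // placeholder URL
  (error, res, body) => error ? console.error(error) : console.log(body)
);
req.form().append('file', fs.createReadStream('photo.png'));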
Here is a fully working function file:
const vision = require('@google-cloud/vision')({
  projectId: "pid",
  keyFilename: 'keyfile.json'
});
// The Cloud Functions for Firebase SDK to create Cloud Functions and setup triggers.
const functions = require('firebase-functions');
// The Firebase Admin SDK to access the Firebase Realtime Database.
const admin = require('firebase-admin');
admin.initializeApp();
// Create the Firebase reference to store our image data
const db = admin.database();
const { Storage } = require('@google-cloud/storage');
// Your Google Cloud Platform project ID
const projectId = 'pid';
// Creates a client
const storage = new Storage({
  projectId: projectId,
});
/**
 * Parses a 'multipart/form-data' upload request
 *
 * @param {Object} req Cloud Function request context.
 * @param {Object} res Cloud Function response context.
 */
const path = require('path');
const os = require('os');
const fs = require('fs');
// Node.js doesn't have a built-in multipart/form-data parsing library.
// Instead, we can use the 'busboy' library from NPM to parse these requests.
const Busboy = require('busboy');
exports.upload = functions.https.onRequest((req, res) => {
  if (req.method !== 'POST') {
    // Return a "method not allowed" error
    return res.status(405).end();
  }
  const busboy = new Busboy({ headers: req.headers });
  const tmpdir = os.tmpdir();
  // This object will accumulate all the fields, keyed by their name
  const fields = {};
  // This object will accumulate all the uploaded files, keyed by their name.
  const uploads = {};
  // This code will process each non-file field in the form.
  busboy.on('field', (fieldname, val) => {
    // TODO(developer): Process submitted field values here
    console.log(`Processed field ${fieldname}: ${val}.`);
    fields[fieldname] = val;
  });
  const fileWrites = [];
  // This code will process each file uploaded.
  busboy.on('file', (fieldname, file, filename) => {
    // Note: os.tmpdir() points to an in-memory file system on GCF
    // Thus, any files in it must fit in the instance's memory.
    console.log(`Processed file ${filename}`);
    const filepath = path.join(tmpdir, filename);
    uploads[fieldname] = filepath;
    const writeStream = fs.createWriteStream(filepath);
    file.pipe(writeStream);
    // File was processed by Busboy; wait for it to be written to disk.
    const promise = new Promise((resolve, reject) => {
      file.on('end', () => {
        writeStream.end();
      });
      writeStream.on('finish', resolve);
      writeStream.on('error', reject);
    });
    fileWrites.push(promise);
  });
  // Triggered once all uploaded files are processed by Busboy.
  // We still need to wait for the disk writes (saves) to complete.
  busboy.on('finish', () => {
    Promise.all(fileWrites).then(() => {
      // TODO(developer): Process saved files here
      for (const name in uploads) {
        const file = uploads[name];
        async function upload2bucket() {
          var bucketName = 'bname';
          const fileRes = await storage.bucket(bucketName).upload(file);
          fs.unlinkSync(file);
          console.log('fileRes', fileRes);
          console.log(`Finish: Processed file ${file}`);
          // Note: res.send() ends the response, so with multiple uploads
          // this fires once per file; fine for a single-file form.
          res.send(fileRes);
        }
        upload2bucket();
      }
    });
  });
  busboy.end(req.rawBody);
});