I am trying to download an XLS or XLSX file, but I am getting this error:
{
"error": {
"errors": [
{
"domain": "global",
"reason": "fileNotExportable",
"message": "Export only supports Docs Editors files."
}
],
"code": 403,
"message": "Export only supports Docs Editors files."
}
}
I am using Node.js. Is it possible to download XLS/XLSX files via the API?
Code example:
export function getFileFromStream(auth, fileId, mimeType) {
  const destPath = `/tmp/${fileId}.xls`;
  const dest = fs.createWriteStream(destPath);
  return new Promise(async (resolve, reject) => {
    const drive = google.drive({ version: 'v3', auth });
    drive.files.export(
      { fileId, mimeType: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' },
      { responseType: 'stream' },
      (errrr, response) => {
        response.data
          .on('end', function () {
            console.log('downloaded');
          })
          .on('error', function (err) {
            console.log('Error during download', err);
          })
          .pipe(dest);
      });
  });
}
The files.export method, as used in the documentation sample, can only be used to download Google Workspace documents (Docs, Sheets, Slides, etc.).
For downloading non-Google Workspace files such as .xls/.xlsx stored in Drive, you need to use the files.get method with alt: 'media', as in the sample for "Download a file stored on Google Drive":
export function getFileFromStream(auth, fileId, mimeType) {
  const destPath = `/tmp/${fileId}.xls`;
  const dest = fs.createWriteStream(destPath);
  return new Promise((resolve, reject) => {
    const drive = google.drive({ version: 'v3', auth });
    drive.files.get(
      { fileId: fileId, alt: 'media' },
      { responseType: 'stream' },
      (err, response) => {
        if (err) return reject(err);
        response.data
          .on('end', function () {
            console.log('Done');
            resolve(destPath);
          })
          .on('error', function (err) {
            console.log('Error during download', err);
            reject(err);
          })
          .pipe(dest);
      });
  });
}
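For completeness, a quick usage sketch (auth would be an already-authorized OAuth2 or service-account client, and FILE_ID is a placeholder for a real Drive file ID; resolving with the destination path matches how the snippet above is wired):

// Hypothetical caller for the function above.
const xlsxMime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
getFileFromStream(auth, FILE_ID, xlsxMime)
  .then((path) => console.log('Saved to', path))
  .catch((err) => console.error('Download failed', err));

Note that files.export is still the right call when the file is a native Google Sheet that you want exported as .xlsx; the 403 above only appears when export is called on a file that is already an Office file.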
Related
I am using Multer to upload an image with a POST request, along with some string data.
When I upload only the image with Postman or React it works, but when I also send the strings, it shows this:
undefined
TypeError: Cannot read properties of undefined (reading 'filename')
The strings get saved to MySQL, but not the image.
That's my code for Multer:
let storage = multer.diskStorage({
    destination: (req, file, callBack) => {
        callBack(null, './public/images/')
    },
    filename: (req, file, callBack) => {
        const mimeExtension = {
            'image/jpeg': '.jpeg',
            'image/jpg': '.jpg',
            'image/png': '.png',
            'image/gif': '.gif',
        }
        callBack(null, file.originalname)
    }
})

let upload = multer({
    storage: storage,
    fileFilter: (req, file, callBack) => {
        // console.log(file.mimetype)
        if (file.mimetype === 'image/jpeg' ||
            file.mimetype === 'image/jpg' ||
            file.mimetype === 'image/png' ||
            file.mimetype === 'image/gif') {
            callBack(null, true);
        } else {
            callBack(null, false);
            req.fileError = 'File format is not valid';
        }
    }
});
And that's my post request:
router.post('/', upload.single('image'), async (req, res) => {
    try {
        if (!req.file) {
            console.log("You didnt upload a picture for your project");
        }
        const { title, about_the_project, project_link, user_id } = req.body
        const projectnum = await SQL(`INSERT into project(title,about_the_project,project_link,user_id)
            VALUES('${title}','${about_the_project}','${project_link}',${user_id}) `)
        console.log(projectnum.insertId);
        console.log(req.file && req.file.filename)
        let imagesrc = 'http://127.0.0.1:5000/images/' + req.file && req.file.filename
        await SQL(`UPDATE project SET image = '${imagesrc}' WHERE projectid = ${projectnum.insertId}`)
        res.send({ msg: "You add a new project" })
    } catch (err) {
        console.log(err);
        return res.sendStatus(500)
    }
})
On the React side, this is the code:
const PostNewProject = async () => {
    let formData = new FormData()
    formData.append('image', imgsrc)
    const res = await fetch(`http://localhost:5000/project`, {
        headers: { 'content-type': 'application/json' },
        method: "post",
        body: JSON.stringify({ title, about_the_project, project_link, languages, user_id }), formData,
        // credentials: "include"
    })
    const data = await res.json()
    if (data.err) {
        document.getElementById("err").innerHTML = data.err
    } else {
        console.log(data);
        document.getElementById("err").innerHTML = data.msg
    }
}
And this is the input for the image upload:
<Input
    type='file'
    name='image'
    sx={{ width: '25ch' }}
    onChange={(e) => setImgsrc(e.target.files[0])}
/>
Thank you for your help!
You did not write a comma after the filename property and before the callback function in your Multer configuration.
I think you should check your exports too.
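For reference, a minimal sketch of what the diskStorage filename option can look like with the commas in place (reusing the mimeExtension map from the question is an assumption, as is using a timestamp to keep file names unique):

const storage = multer.diskStorage({
    destination: (req, file, callBack) => {
        callBack(null, './public/images/');
    },
    filename: (req, file, callBack) => {
        // Assumption: map the MIME type to an extension instead of trusting originalname.
        const mimeExtension = {
            'image/jpeg': '.jpeg',
            'image/jpg': '.jpg',
            'image/png': '.png',
            'image/gif': '.gif',
        };
        callBack(null, Date.now() + (mimeExtension[file.mimetype] || ''));
    },
});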
I'm having trouble with Node 16 and ES6. I'm trying to make an upload-file controller, but I'm stuck with req.file.stream, which is undefined.
I'm using Multer to handle the uploaded files.
The first issue was __dirname being undefined, which I was able to fix with path and new URL().
The error I get from pipeline:
node:internal/process/promises:246
triggerUncaughtException(err, true /* fromPromise */);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "source" argument must be of type function or an instance of Stream, Iterable, or AsyncIterable. Received undefined
My userRoutes.js:
import express from "express";
import { signin, signup, logout } from "../Controller/AuthController.js";
import {
    getUsers,
    getUser,
    updateUser,
    deleteUser,
    follow,
    unfollow,
} from "../Controller/UserController.js";
import { upload } from "../Controller/UploadController.js";
import multer from "multer";
const router = express.Router();
// Auth
router.post("/signin", signin);
router.post("/signup", signup);
router.post("/logout", logout);
// users
router.get("/", getUsers);
router.get("/:id", getUser);
router.patch("/:id", updateUser);
router.delete("/:id", deleteUser);
router.patch("/follow/:id", follow);
router.patch("/unfollow/:id", unfollow);
// upload
router.post("/upload", multer().single("file"), upload);
export default router;
And my UploadController.js
import fs from "fs";
import { promisify } from "util";
import stream from "stream";
const pipeline = promisify(stream.pipeline);
// const { uploadErrors } = require("../utils/errors.utils");
import path from "path";
const __dirname = path.dirname(new URL(import.meta.url).pathname);
export const upload = async (req, res) => {
    try {
        // console.log(req.file);
        console.log(__dirname);
        if (
            !req.file.mimetype == "image/jpg" ||
            !req.file.mimetype == "image/png" ||
            !req.file.mimetype == "image/jpeg"
        )
            throw Error("invalid file");
        if (req.file.size > 2818128) throw Error("max size");
    } catch (err) {
        const errors = uploadErrors(err);
        return res.status(201).json({ err });
    }
    const fileName = req.body.name + ".jpg";
    await pipeline(
        req.file.stream,
        fs.createWriteStream(
            `${__dirname}/../client/public/uploads/profil/${fileName}`
        )
    );
    try {
        await User.findByIdAndUpdate(
            req.body.userId,
            { $set: { picture: "./uploads/profil/" + fileName } },
            { new: true, upsert: true, setDefaultsOnInsert: true },
            (err, docs) => {
                if (!err) return res.send(docs);
                else return res.status(500).send({ message: err });
            }
        );
    } catch (err) {
        return res.status(500).send({ message: err });
    }
};
Multer gives you the file as a Buffer, not a Stream. req.file.stream is not a valid property, but req.file.buffer is: https://github.com/expressjs/multer#file-information.
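If you keep the in-memory default (multer() with no storage option, as in the question's route), a minimal sketch of writing that buffer to disk yourself could look like this (the destination path and the __dirname workaround are taken from the question; treating req.body.name as the target file name is an assumption):

import fs from "fs";
import path from "path";

const __dirname = path.dirname(new URL(import.meta.url).pathname);

export const upload = async (req, res) => {
    // With memory storage, Multer exposes the uploaded file as req.file.buffer.
    const fileName = req.body.name + ".jpg";
    const dest = path.join(__dirname, "../client/public/uploads/profil", fileName);
    await fs.promises.writeFile(dest, req.file.buffer);
    res.send({ picture: "./uploads/profil/" + fileName });
};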
From the look of your code, you're trying to save the file on disk. You can use multer's DiskStorage for that. Create a storage instance and pass it to the multer instance as a configuration:
const storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, `${__dirname}/../client/public/uploads/profil/`);
    },
    filename: function (req, file, cb) {
        cb(null, req.body.name + '.jpg');
    },
});

// Give the middleware a different name so it does not shadow the `upload` controller.
const uploadMiddleware = multer({ storage });
router.post('/upload', uploadMiddleware.single('file'), upload);
Have a look at this free Request Parsing in Node.js Guide for working with file uploads in Node.js.
If you want to use req.file.stream, you will need to install this version of Multer:
npm install --save multer@^2.0.0-rc.1
and your code will work; just change req.file.mimetype to req.file.detectedMimeType.
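As a minimal sketch of the controller's happy path under that release candidate (that req.file exposes stream and detectedMimeType there is an assumption based on the answer above; the path and __dirname handling mirror the question):

import fs from "fs";
import path from "path";
import { pipeline } from "stream/promises"; // built into Node 16

const __dirname = path.dirname(new URL(import.meta.url).pathname);

export const upload = async (req, res) => {
    // Assumption: multer@2.0.0-rc.x populates detectedMimeType by sniffing the file contents.
    if (!["image/jpg", "image/jpeg", "image/png"].includes(req.file.detectedMimeType)) {
        return res.status(400).json({ err: "invalid file" });
    }
    const fileName = req.body.name + ".jpg";
    await pipeline(
        req.file.stream,
        fs.createWriteStream(`${__dirname}/../client/public/uploads/profil/${fileName}`)
    );
    res.send({ picture: "./uploads/profil/" + fileName });
};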
I am trying to import the row/column data from an Excel file into MySQL using Node.js. Are there any references I can learn from, any Node.js module that does this, or any sample code?
I used the npm packages "xlsx-to-json-lc" and "xls-to-json-lc" to convert the Excel file to JSON directly, without converting it to CSV first. Hope this helps.
var storage = multer.diskStorage({ // Multer's disk storage settings
    destination: function (req, file, cb) {
        cb(null, './uploads/')
    },
    filename: function (req, file, cb) {
        var datetimestamp = dateFormat(new Date(), "yyyy~mm~dd h~MM~ss");
        cb(null, 'template' + '-' + datetimestamp + '.' +
            file.originalname.split('.')[file.originalname.split('.').length - 1])
    }
});

var upload = multer({ // Multer settings
    storage: storage,
    fileFilter: function (req, file, callback) { // only accept .xls / .xlsx uploads
        if (['xls', 'xlsx'].indexOf(file.originalname.split('.')[file.originalname.split('.').length - 1]) === -1) {
            return callback(new Error('Wrong extension type'));
        }
        callback(null, true);
    }
}).single('file');

var exceltojson;
upload(req, res, function (err) {
    if (err) {
        res.json({ error_code: 1, err_desc: err });
        return;
    }
    if (!req.file) {
        // res.json({ error_code: 1, err_desc: err });
        return;
    }
    // Pick the converter that matches the file extension.
    if (req.file.originalname.split('.')[req.file.originalname.split('.').length - 1] === 'xlsx') {
        exceltojson = xlsxtojson;
    } else {
        exceltojson = xlstojson;
    }
    try {
        exceltojson({
            input: req.file.path,
            output: null, // since we don't need output.json
            //lowerCaseHeaders: true
        }, function (err, result) {
            if (err) {
                return res.json({ error_code: 1, err_desc: err, data: null });
            }
            console.log(result);
        });
    } catch (e) {
        res.json({ error_code: 1, err_desc: 'Corrupted Excel file' });
    }
});
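Since the end goal is MySQL, here is a short sketch of pushing the converted rows into a table (the mysql connection settings and the excel_data table name are hypothetical; the JSON keys are assumed to match the column names):

var mysql = require('mysql');
var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'secret',
    database: 'mydb'
});

function saveRows(rows) {
    rows.forEach(function (row) {
        // 'INSERT ... SET ?' maps each JSON object's keys onto columns of the same name.
        connection.query('INSERT INTO excel_data SET ?', row, function (err) {
            if (err) console.log('Insert failed', err);
        });
    });
}

Call saveRows(result) inside the exceltojson callback above instead of only logging the result.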
I have deployed a Cloud Function to invoke a Dataflow pipeline template, and I am trying to trigger the function by placing a file in a Cloud Storage bucket.
As Node.js prerequisites I have run:
npm init
npm install --save googleapis
Index.js
const google = require('googleapis');

exports.goWithTheDataFlow = function (event, callback) {
    const file = event.data;
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        if (authClient.createScopedRequired && authClient.createScopedRequired()) {
            authClient = authClient.createScoped([
                'https://www.googleapis.com/auth/cloud-platform',
                'https://www.googleapis.com/auth/userinfo.email'
            ]);
        }
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);
        dataflow.projects.templates.create({
            projectId: projectId,
            resource: {
                parameters: {
                    inputFile: `gs://${file.bucket}/${file.name}`
                },
                jobName: 'cloud-fn-dataflow-test',
                gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
            }
        }, function (err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });
    callback();
};
Command used to deploy cloud function:
gcloud beta functions deploy goWithTheDataFlow --stage-bucket cf100stage --trigger-bucket cf100
Dataflow (Apache Beam):
I was able to execute the Dataflow template from the console, and below is the path of the template:
'gs://jaison/templates/ApacheBeamTemplate'
The function crashes with the error below:
TypeError: Cannot read property 'getApplicationDefault' of undefined
    at exports.goWithTheDataFlow (/user_code/index.js:11:17)
    at /var/tmp/worker/worker.js:695:16
    at /var/tmp/worker/worker.js:660:9
    at _combinedTickCallback (internal/process/next_tick.js:73:7)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
It looks like I am missing libraries. I am not sure how to fix this. Please help.
My Cloud Function works with the changes below.
1. Setting GOOGLE_APPLICATION_CREDENTIALS to the service account JSON file:
export GOOGLE_APPLICATION_CREDENTIALS="/path/of/svc/json/file.json"
2. Updating index.js. Newer versions of googleapis export a {google} object, so the import has to be destructured; requiring the module directly is why google.auth was undefined before:
var { google } = require('googleapis');

exports.TriggerBeam = (event, callback) => {
    const file = event.data;
    const context = event.context;
    console.log(`Event ${context.eventId}`);
    console.log(` Event Type: ${context.eventType}`);
    console.log(` Bucket: ${file.bucket}`);
    console.log(` File: ${file.name}`);
    console.log(` Metageneration: ${file.metageneration}`);
    console.log(` Created: ${file.timeCreated}`);
    console.log(` Updated: ${file.updated}`);
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);
        dataflow.projects.templates.create({
            projectId: projectId,
            resource: {
                parameters: {
                    inputFile: `gs://${file.bucket}/${file.name}`
                },
                jobName: 'cloud-fn-beam-test',
                gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
            }
        }, function (err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });
    callback();
};
I am uploading a file through my REST/MEAN application, but I am not able to retrieve it back from the backend. How do I retrieve it?
Here is the upload code:
var storage = multer.diskStorage({
    destination: function (req, file, callback) {
        callback(null, './uploads');
    },
    filename: function (req, file, callback) {
        callback(null, mongoose.Types.ObjectId() + '-' + file.originalname);
    }
});

var upload = multer({ storage: storage }).array('userPhoto', 10);

upload(req, res, function (err) {
    console.log(req.files);
    var images = [];
    for (var i = 0; i < req.files.length; i++) {
        images[i] = req.files[i].path;
    }
    var newalbum = new albummodel({
        image: images
    });
    newalbum.save(function (err, albm) {
        if (err) {
            res.json({ success: false, msg: "can't store." });
        } else {
            console.log(albm);
        }
    });
    if (err) {
        return res.end("Error uploading file.");
    }
    res.end("File is uploaded");
});
Here is my retrieval code:
albummodel.findOne({ _id: req.params.id }, function (err, docs) {
    res.json(docs);
});
This whole discussion should help you; please refer to it:
https://github.com/Automattic/mongoose/issues/3079
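Since the schema above stores only file paths, one simple way to get the images back (a sketch, assuming the Express app and the ./uploads folder from the upload code) is to serve that folder statically and let the client request the stored path:

var express = require('express');
var app = express();

// Every path saved in MongoDB looks like "uploads/<objectid>-<originalname>",
// so exposing the folder makes each stored path a downloadable URL.
app.use('/uploads', express.static('./uploads'));

The findOne response then gives the client the stored image paths, and each one can be fetched as http://your-host/uploads/<filename>.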