Resize all existing images stored in firebase storage and update the newly resized image url to database via api call [duplicate] - google-cloud-functions

This question already has an answer here:
How can I resize all existing images in firebase storage?
(1 answer)
Closed 9 months ago.
I have a requirement to resize both new and existing images stored in Firebase Storage. For new images, I enabled Firebase's Resize Images extension. For existing images, how can I resize each image and get the newly resized image URL to update back to the database via an API call?
Here is my Firebase function that fetches the existing image URLs from the database. My question is: how do I resize those images and get the new image URLs?
const functions = require("firebase-functions");
const axios = require("axios");

async function getAlbums() {
  const endpoint = "https://api.mydomain.com/graphql";
  const headers = {
    "content-type": "application/json",
  };
  const graphqlQuery = {
    query: `query Albums {
      albums {
        id
        album_cover
      }
    }`,
  };
  functions.logger.info("Call API");
  const response = await axios({
    url: endpoint,
    method: "post",
    headers: headers,
    data: graphqlQuery,
  });
  if (response.errors) {
    functions.logger.info("API ERROR : ", response.errors); // errors if any
  } else {
    return response.data.data.albums;
  }
}

exports.manualGenerateResizedImage = functions.https.onRequest(async (req, res) => {
  const albums = await getAlbums();
  functions.logger.info("No. of Albums : ", albums.length);
  res.send(`Fetched ${albums.length} albums`); // terminate the HTTPS function
});

I think the answer below from Renaud Tarnec will definitely help you.
If you look at the code of the "Resize Images" extension, you will see that the Cloud Function that underlies the extension is triggered by an onFinalize event, which means:
When a new object (or a new generation of an existing object) is
successfully created in the bucket. This includes copying or rewriting
an existing object.
So, without rewriting/regenerating the existing images, the extension will not be triggered.
However, you could easily write your own Cloud Function that does the same thing but is triggered, for example, by a call to a specific URL (HTTPS Cloud Function) or by creating a new document in a temporary Firestore collection (background-triggered Cloud Function).
This Cloud Function would execute the following steps (a minimal sketch follows the list):
1. Get all the files of your bucket, see the getFiles() method of the Google Cloud Storage Node.js Client API. This method returns a GetFilesResponse object which is an Array of File instances.
2. By looping over the array, for each file, check if the file has a corresponding resized image in the bucket (depending on the way you configured the extension, the resized images may be in a specific folder).
3. If a file does not have a corresponding resized image, execute the same business logic as the extension's Cloud Function for this file.
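A minimal sketch of those steps (assuming the resized files are written next to the originals with a _200x200 suffix, which depends on how you configured the extension):
const { Storage } = require("@google-cloud/storage");
const storage = new Storage();
const bucket = storage.bucket("projectid.appspot.com"); // replace with your bucket name

async function resizeMissingThumbnails() {
  // 1. Get all the files of the bucket (optionally under a prefix)
  const [files] = await bucket.getFiles({ prefix: "images/albums" });
  for (const file of files) {
    // 2. Check whether a resized counterpart already exists
    //    ("_200x200" suffix next to the original is an assumption)
    const resizedName = file.name.replace(/(\.[^.]+)$/, "_200x200$1");
    const [exists] = await bucket.file(resizedName).exists();
    if (!exists) {
      // 3. Rewrite the object (copy onto itself) so the extension's
      //    onFinalize trigger fires again and generates the resized image
      await file.copy(file.name);
    }
  }
}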
There is an official Cloud Function sample which shows how to create a Cloud Storage-triggered Firebase Function that creates resized thumbnails from uploaded images and writes the thumbnail URLs to the Realtime Database (see the last lines of the index.js file).
Note: if you have a lot of files to process, you should most probably work in batches, since there is a limit of 9 minutes for Cloud Function execution. Also, depending on the number of images to process, you may need to increase the timeout value and/or the allocated memory of your Cloud Function, see https://firebase.google.com/docs/functions/manage-functions#set_timeout_and_memory_allocation
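For example, with the first-generation firebase-functions API you can raise both limits when declaring the function (the values below are just an illustration):
exports.manualGenerateResizedImage = functions
  .runWith({ timeoutSeconds: 540, memory: "1GB" }) // max timeout, more memory
  .https.onRequest(async (req, res) => {
    // ... process a batch of images here ...
    res.send("done");
  });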

In case someone needs it, this is how I resized the existing images.
const functions = require("firebase-functions");
const axios = require("axios");
const { Storage } = require("@google-cloud/storage");

const storage = new Storage();
// Don't forget to replace with your bucket name
const bucket = storage.bucket("projectid.appspot.com");

async function getAlbums() {
  const endpoint = "https://api.mydomain.com/graphql";
  const headers = {
    "content-type": "application/json",
  };
  const graphqlQuery = {
    query: `query Albums {
      albums {
        id
        album_cover
      }
    }`,
  };
  const response = await axios({
    url: endpoint,
    method: "post",
    headers: headers,
    data: graphqlQuery,
  });
  if (response.errors) {
    functions.logger.error("API ERROR : ", response.errors); // errors if any
  } else {
    return response.data.data.albums;
  }
}
// Extract the file name from a download URL (strip the path and the query string)
function getFileName(url) {
  var decodedUrl = decodeURIComponent(url);
  var index = decodedUrl.lastIndexOf("/") + 1;
  var filenameWithParam = decodedUrl.substr(index);
  index = filenameWithParam.lastIndexOf("?");
  var filename = filenameWithParam.substr(0, index);
  return filename;
}

// Extract the file name from a storage path
function getFileNameFromFirestore(url) {
  var index = url.lastIndexOf("/") + 1;
  var filename = url.substr(index);
  return filename;
}
// List the files under the prefix, match each one to an album from the API,
// and copy it under a new name so the resize extension's onFinalize trigger fires.
const triggerBucketEvent = async () => {
  const [files] = await bucket.getFiles({
    prefix: "images/albums", // you can add a path prefix
    autoPaginate: false,
  });
  const albums = await getAlbums();
  await Promise.all(
    files.map((file) => {
      var fileName = getFileNameFromFirestore(file.name);
      var result = albums.find((obj) => {
        return getFileName(obj.album_cover) === fileName;
      });
      if (result) {
        var file_ext = fileName.substr(
          (Math.max(0, fileName.lastIndexOf(".")) || Infinity) + 1
        );
        var newFileName = result.id + "." + file_ext;
        // Copy each file into the same directory under a different name
        return file.copy("images/albums/" + newFileName);
      } else {
        functions.logger.info(file.name, " not found in album list!");
      }
    })
  );
};

exports.manualGenerateResizedImage = functions.https.onRequest(async (req, res) => {
  await triggerBucketEvent();
  res.send("Resize triggered for existing album covers"); // terminate the HTTPS function
});
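If you also need the URL of the resized file to send back to your GraphQL API, one option (a sketch, assuming the extension writes the resized file next to the original with a _200x200 suffix, adjust to your extension configuration) is to generate a signed URL for it:
async function getResizedImageUrl(originalPath) {
  // e.g. images/albums/<id>.jpg -> images/albums/<id>_200x200.jpg (assumed naming)
  const resizedPath = originalPath.replace(/(\.[^.]+)$/, "_200x200$1");
  const [url] = await bucket.file(resizedPath).getSignedUrl({
    action: "read",
    expires: "03-01-2500", // far-future expiry; adjust as needed
  });
  return url; // pass this to your update mutation to store the new album_cover
}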

Related

How to save & retrieve image to/from MySQL?

Two-part question.
Part 1:
I'm uploading an image to my server and want to save it to my database.
So far:
table:
resolver:
registerPhoto: inSequence([
  async (obj, { file }) => {
    const { filename, mimetype, createReadStream } = await file;
    const stream = createReadStream();
    const t = await db.images.create({
      Name: 'test',
      imageData: stream,
    });
  },
])
executing query:
Executing (default): INSERT INTO `images` (`Id`,`imageData`,`Name`) VALUES (DEFAULT,?,?);
But nothing is saved.
I'm new to this and I'm probably missing something, but I don't know what.
Part 2:
This follows from part 1: let's say I manage to save the image, how do I read it and send it back to my FE?
An edit: I've read a lot of guides that save the image name to the DB and the actual image in a folder. This is NOT what I'm after; I want to save the image to the DB and then be able to fetch it from the DB and present it.
This took me some time but I finally figured it out.
First step (saving to the DB):
You have to get the entire stream data and read it like this:
export const readStream = async (stream, encoding = 'base64') => {
  stream.setEncoding(encoding);
  return new Promise((resolve, reject) => {
    let data = '';
    // eslint-disable-next-line no-return-assign
    stream.on('data', chunk => (data += chunk));
    stream.on('end', () => resolve(data));
    stream.on('error', error => reject(error));
  });
};
use it like this:
const streamData = await readStream(stream);
Before saving I turn the stream data into a buffer:
const buff = Buffer.from(streamData);
Finally, the save part:
db.images.create(
  {
    Name: filename,
    imageData: buff,
    Length: stream.bytesRead,
    Type: mimetype,
  },
  { transaction: param }
);
Note that I added the Length and Type parameters; these are needed if you'd like to return a stream when you return the image.
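For reference, a minimal Sequelize model matching the fields used above might look like this (the column types, BLOB('long') in particular, are assumptions):
// models/images.js (sketch)
module.exports = (sequelize, DataTypes) =>
  sequelize.define('images', {
    Id: { type: DataTypes.INTEGER, autoIncrement: true, primaryKey: true },
    Name: DataTypes.STRING,
    imageData: DataTypes.BLOB('long'), // LONGBLOB in MySQL, large enough for images
    Length: DataTypes.INTEGER,
    Type: DataTypes.STRING,
  });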
Step 2 (retrieving the image):
As @xadm said multiple times, you cannot return an image from GraphQL, and after some time I had to accept that fact; hopefully GraphQL will remedy this in the future.
So what I needed to do is set up a route on my fastify backend, send an image Id to this route, fetch the image, and then return it.
I had a few different approaches to this, but in the end I simply returned a binary and on the frontend I encoded it to base64.
Backend part:
import { Readable } from 'stream';

const handler = async (req, reply) => {
  const p: postParams = req.params;
  const parser = uuIdParserT();
  const img = await db.images.findByPk(parser.setValueAsBIN(p.id));
  const binary = img.dataValues.imageData.toString('binary');
  const myStream = new Readable({
    read() {
      this.push(Buffer.from(binary));
      this.push(null);
    },
  });
  reply.send(myStream);
};

export default (server: FastifyInstance) =>
  server.get<null, any>('/:id', opts, handler);
Frontend part:
useEffect(() => {
  // axiosState is the obj that holds the image
  if (!axiosState.loading && axiosState.data) {
    // @ts-ignore
    const b64toBlob = (b64Data, contentType = '', sliceSize = 512) => {
      const byteCharacters = atob(b64Data);
      const byteArrays = [];
      for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
        const slice = byteCharacters.slice(offset, offset + sliceSize);
        const byteNumbers = new Array(slice.length);
        // @ts-ignore
        // eslint-disable-next-line no-plusplus
        for (let i = 0; i < slice.length; i++) {
          byteNumbers[i] = slice.charCodeAt(i);
        }
        const byteArray = new Uint8Array(byteNumbers);
        byteArrays.push(byteArray);
      }
      const blob = new Blob(byteArrays, { type: contentType });
      return blob;
    };
    const blob = b64toBlob(axiosState.data, 'image/jpg');
    const urlCreator = window.URL || window.webkitURL;
    const imageUrl = urlCreator.createObjectURL(blob);
    setimgUpl(imageUrl);
  }
}, [axiosState]);
and finally, in the HTML:
<img src={imgUpl} alt="NO" className="imageUpload" />
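The request that fills axiosState is not shown above; a minimal sketch of what it could look like (the /images/:id URL, imageId, and setAxiosState are assumptions, not the original author's code) is to fetch the binary as an ArrayBuffer and base64-encode it before the effect runs:
const fetchImage = async (imageId) => {
  setAxiosState({ loading: true, data: null });
  const response = await axios.get(`/images/${imageId}`, { responseType: 'arraybuffer' });
  // Convert the raw bytes into the base64 string expected by b64toBlob above
  const base64 = btoa(
    new Uint8Array(response.data).reduce((acc, byte) => acc + String.fromCharCode(byte), '')
  );
  setAxiosState({ loading: false, data: base64 });
};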
OTHER:
For anyone attempting the same, NOTE that this is not a best-practice thing to do.
Almost every article I found saved the images on the server and saved an image Id and other metadata in the database. For the exact pros and cons of this, I found the following helpful:
Storing Images in DB - Yea or Nay?
I was focusing on finding out how to do it if, for some reason, I want to save an image in the database, and I finally solved it.
There are two ways to store images in your SQL database. You either store the actual image on your server and save the image path inside your MySQL DB, OR you create a BLOB from the image and store it in the DB.
Here is a handy read: https://www.technicalkeeda.com/nodejs-tutorials/nodejs-store-image-into-mysql-database
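If you go the BLOB route without an ORM, here is a small sketch using the mysql2 package (the table and column names are assumptions):
const mysql = require('mysql2/promise');

async function saveImage(name, imageBuffer) {
  const conn = await mysql.createConnection({ host: 'localhost', user: 'root', database: 'test' });
  // imageData is assumed to be a LONGBLOB column; mysql2 binds Buffers directly
  await conn.execute('INSERT INTO images (Name, imageData) VALUES (?, ?)', [name, imageBuffer]);
  await conn.end();
}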
You should save the image in a directory and save the link to this image in the database.

How to add media upload for BigQuery Rest API using UrlFetchApp?

I need to stream data into BigQuery from my Google Apps Script addon.
But I need to use my service account only (I need to insert data into my BigQuery table, not the user's BigQuery table).
I followed this example: https://developers.google.com/apps-script/advanced/bigquery#load_csv_data
Because the Apps Script Advanced Service doesn't support service accounts natively, I needed to change this example a bit:
Instead of using the BigQuery Advanced Service, I need to get an OAuth token for my service account and then use the BigQuery REST API to handle the same job.
This is what I did:
function getBigQueryService() {
  return (
    OAuth2.createService('BigQuery')
      // Set the endpoint URL.
      .setTokenUrl('https://accounts.google.com/o/oauth2/token')
      // Set the private key and issuer.
      .setPrivateKey(PRIVATE_KEY)
      .setIssuer(CLIENT_EMAIL)
      // Set the property store where authorized tokens should be persisted.
      .setPropertyStore(PropertiesService.getScriptProperties())
      // Caching
      .setCache(CacheService.getUserCache())
      // Locking
      .setLock(LockService.getUserLock())
      // Set the scopes.
      .setScope('https://www.googleapis.com/auth/bigquery')
  )
}

export const insertLog = (userId, type) => {
  const bigQueryService = getBigQueryService()
  if (!bigQueryService.hasAccess()) {
    console.error(bigQueryService.getLastError())
    return
  }
  const projectId = bigqueryCredentials.project_id
  const datasetId = 'usage'
  const tableId = 'logs'
  const row = {
    timestamp: new Date().toISOString(),
    userId,
    type,
  }
  const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
  // Create the data upload job.
  const job = {
    configuration: {
      load: {
        destinationTable: {
          projectId,
          datasetId,
          tableId,
        },
        sourceFormat: 'NEWLINE_DELIMITED_JSON',
      },
    },
  }
  const url = `https://bigquery.googleapis.com/upload/bigquery/v2/projects/${projectId}/jobs`
  const headers = {
    Authorization: `Bearer ${bigQueryService.getAccessToken()}`,
    'Content-Type': 'application/json',
  }
  const options = {
    method: 'post',
    headers,
    payload: JSON.stringify(job),
  }
  try {
    const response = UrlFetchApp.fetch(url, options)
    const result = JSON.parse(response.getContentText())
    console.log(JSON.stringify(result, null, 2))
  } catch (err) {
    console.error(err)
  }
}
As you can see in my code, I get the Blob data (which is the actual JSON data that I need to put into the BigQuery table) using this line:
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
But I don't know where to use this data with the BigQuery REST API.
The documentation doesn't mention it: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert
How can this be done? Thank you.
I was able to solve this problem using Tanaike's FetchApp library:
https://github.com/tanaikech/FetchApp#fetch
For anyone who has this issue in the future: please check the comments in the code to understand what was done.
It turns out the job variable is treated as the metadata, and the data variable is treated as the file in the form data object:
// First you need to convert the JSON to newline-delimited JSON,
// then turn the whole thing into a Blob using Utilities.newBlob
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
// Create the data upload job.
const job = {
  configuration: {
    load: {
      destinationTable: {
        projectId,
        datasetId,
        tableId,
      },
      sourceFormat: 'NEWLINE_DELIMITED_JSON',
    },
  },
}
const url = `https://bigquery.googleapis.com/upload/bigquery/v2/projects/${projectId}/jobs?uploadType=multipart`
const headers = {
  Authorization: `Bearer ${bigQueryService.getAccessToken()}`,
}
const form = FetchApp.createFormData() // Create form data
form.append('metadata', Utilities.newBlob(JSON.stringify(job), 'application/json'))
form.append('file', data)
const options = {
  method: 'post',
  headers,
  muteHttpExceptions: true,
  body: form,
}
try {
  FetchApp.fetch(url, options)
} catch (err) {
  console.error(err)
}
Note: When you create the service account, choose role BigQuery Admin, or any role that has permission bigquery.jobs.create
https://cloud.google.com/bigquery/docs/access-control#bigquery-roles
Because if you don't, you will have the error
User does not have bigquery.jobs.create permission...

How to get metadata from Amazon Kinesis Video Streams via Video.js and http-streaming?

I am working on the client side of Amazon Kinesis Video Streams, using video.js and http-streaming to display video.
However, on the stream server there is some metadata (text only) for each fragment (see this link: https://aws.amazon.com/about-aws/whats-new/2018/10/kinesis-video-streams-fragment-level-metadata-support/).
I don't know how to get this data using the AWS JavaScript SDK (e.g. https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/KinesisVideoMedia.html).
I've tested with the getMedia function, but it doesn't work as expected (it just gets the media info once, not for each fragment):
var kinesisvideomedia = new AWS.KinesisVideoMedia({
  //apiVersion: '2017-09-30',
  region: options.region,
  accessKeyId: options.accessKeyId,
  secretAccessKey: options.secretAccessKey,
  endpoint: response.DataEndpoint
});
// 3. Create the parameters for getMedia()
var mopts = {
  StartSelector: {
    StartSelectorType: 'EARLIEST'
  },
  StreamName: streamName
};
kinesisvideomedia.getMedia(mopts, function (error, vmresp) {
  if (error) {
    console.log(error);
  }
  //console.log(vmresp);
});
Many thanks for any support!
Your parameters only tell getMedia to grab the earliest fragment from the stream. If you want to get all the following fragments, you have to use the ContinuationToken returned from the previous call to getMedia when making additional calls to getMedia.
Regarding the metadata at the fragment level, you need to parse the response payload, for example as in this example, using the Kinesis Video Streams parser library.
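A sketch of such a follow-up call (the token itself is carried inside the previous payload's MKV tags, e.g. AWS_KINESISVIDEO_CONTINUATION_TOKEN, so extracting it is up to your parser):
var nextParams = {
  StreamName: streamName,
  StartSelector: {
    StartSelectorType: 'CONTINUATION_TOKEN',
    ContinuationToken: continuationToken // extracted from the previous getMedia payload
  }
};
kinesisvideomedia.getMedia(nextParams, function (error, resp) {
  if (error) {
    console.log(error);
  }
  // resp.Payload contains the next set of fragments
});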
getMedia is not well documented in the JS aws-sdk; the main trick is to use request.createReadStream() in order to stream the media chunks.
You could do it like this:
var kinesisvideomedia = new AWS.KinesisVideoMedia();
var kinesisvideo = new AWS.KinesisVideo();

const params = {
  APIName: "GET_MEDIA",
  StreamName: streamName
}
kinesisvideo.getDataEndpoint(params, function(err, data) {
  if (err) {
    throw(err)
  }
  console.log("Changing endpoint to", data.DataEndpoint);
  kinesisvideomedia.endpoint = data.DataEndpoint;
  var mopts = {
    StartSelector: {
      StartSelectorType: 'EARLIEST'
    },
    StreamName: streamName
  };
  const request = kinesisvideomedia.getMedia(mopts);
  const stream = request.createReadStream();
  stream.on('data', function(data) { console.log("data", data) })
});

aws lambda s3 function isn't called inside alexa skills kit

I am trying to create a skill for Amazon Echo that will read a JSON file from AWS S3. When I call the basic S3 get code on its own, it works. And the Amazon Alexa code works on its own.
But when I call them together, the S3 callback gets skipped: for the following code, the console.log statements before and after s3.getObject() run, but the one inside the callback never does. I do not understand why.
I also checked whether s3 was being called, and it is.
let aws = require('aws-sdk');
let s3 = new aws.S3({ apiVersion: '2006-03-01' });

function callS3() {
  console.log('loading S3 function');
  var myData = [];
  const params = {
    Bucket: 'cvo-echo',
    Key: 'data.json'
  };
  console.log("trying to get s3");
  s3.getObject(params, (err, data) => {
    if (err) {
      console.log('error in s3 get: \n' + err);
      //const message = `Error getting object ${key} from bucket ${bucket}.
      // Make sure they exist and your bucket is in same region as this function.`;
      //console.log(message);
    } else {
      console.log('CONTENT TYPE: ', data.ContentType);
      console.log('Data body: \n' + data.Body.toString());
      myData = JSON.parse(data.Body.toString());
      console.log('myData.length = ' + myData.length);
    }
    console.log('myData >> ' + myData);
  });
  console.log('finished callS3() func');
  return myData;
}
This might be a control-flow issue. I've worked with Amazon's SDK before and ran into similar issues. Try implementing async control flow (callbacks or the async library) within your code to have better control of what happens when. This way methods won't get skipped.
UPDATE: adding some code examples of what you could do.
function callS3(callback) {
  console.log('loading S3 function');
  var myData = [];
  const params = {
    Bucket: 'cvo-echo',
    Key: 'data.json'
  };
  console.log("trying to get s3");
  s3.getObject(params, (err, data) => {
    if (err) {
      console.log('error in s3 get: \n' + err);
      //const message = `Error getting object ${key} from bucket ${bucket}.
      // Make sure they exist and your bucket is in same region as this function.`;
      //console.log(message);
      callback(err, null); // callback with the error.
    } else {
      console.log('CONTENT TYPE: ', data.ContentType);
      console.log('Data body: \n' + data.Body.toString());
      myData = JSON.parse(data.Body.toString());
      console.log('myData.length = ' + myData.length);
      console.log('myData >> ' + myData);
      console.log('finished callS3() func');
      // Invoke the callback inside the S3 handler so this function
      // does not hand back control until the S3 call completes.
      callback(null, myData); // first element is an error, second is your data; the first is null if no error occurred.
    }
  });
}
/*
This MIGHT work without async, but just in case you can read more about
async.waterfall, where functions pass down values to the next function.
*/
async.waterfall([
  callS3 // you can include more functions here; the callback from the last function will be available for the next.
  // myNextFunction
], function(err, myData) {
  // you can use myData here.
});
It's a timing issue. Here is an example of loading a JSON file from an S3 share when a session is started.
function onLaunch(launchRequest, session, callback) {
  var sBucket = "your-bucket-name";
  var sFile = "data.json";
  var params = { Bucket: sBucket, Key: sFile };
  var s3 = new AWS.S3();
  s3.getObject(params, function(err, data) {
    if (!err) {
      var json = JSON.parse(Buffer.from(data.Body).toString("utf8"));
      for (var i = 0; i < json.length; i++) {
        console.log("name:" + json[i].name + ", age:" + json[i].age);
      }
      getWelcomeResponse(callback);
    } else {
      console.log(err.toString());
    }
  });
}
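Alternatively, with the v2 JavaScript SDK you can avoid the callback/timing issue entirely by using the promise form and awaiting it (a sketch, reusing the bucket and key from the question):
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });

async function callS3() {
  const params = { Bucket: 'cvo-echo', Key: 'data.json' };
  // getObject(...).promise() resolves once the object has been fetched,
  // so a caller can simply `await callS3()` before building the Alexa response.
  const data = await s3.getObject(params).promise();
  return JSON.parse(data.Body.toString());
}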

AngularJS File Upload to Backend Express Server

I am trying to do a file upload using AngularJS, with the angular-file-upload library (https://github.com/danialfarid/angular-file-upload).
Here is my code
// ===============================My HTML File===========================
<input type="file" ng-file-select="onFileSelect($files)">

// ===============================My Controller==========================
$scope.formObj = {
  name: "Test"
};
var fileToUpload;

$scope.onFileSelect = function (file) {
  fileToUpload = file[0];
};

// POST request to /api/items
$scope.addItem = function() {
  console.log($scope.formObj);
  $scope.upload = $upload.upload({
    url: '/api/items',
    method: 'POST',
    data: { myObj: $scope.formObj },
    file: fileToUpload
  }).success(function(data, status, headers, config) {
    console.log("success");
  });
};

// ================================My Backend=============================
// This is the function that will receive the POST request to /api/items
exports.create = function(req, res) {
  console.log(req.body); // req.body is just an empty object. ==> {}
  // apparently, I found all the data to be in req._readableState.buffer[0]
  // in the form of a buffer
  var buffer = req._readableState.buffer[0];
  // console.log(buffer.toString()) results in something similar to this:
  // { name: "Test", image: Object }
  console.log(buffer.toString());
  return res.send(200);
};
So my backend received the formObj with all its properties and values; however, the actual file data itself, whether in the form of a buffer, base64, or whatever, never gets received.
I wonder why. This is my first time working with file uploads, so I don't understand the concepts.
Please point me in the right direction.
If you are using the latest version of Express, you'll notice that
app.use(express.multipart()); is no longer bundled with Express.
So make the following configuration changes in express.js:
var multer = require('multer');
app.use(multer({ dest: './uploads/' }));
You'll find that after doing this, the data and the file are available in req.body and req.file respectively.
Hope it helps.
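Note that with multer 1.x and later the middleware is no longer applied globally; you attach it per route and read the uploaded file from req.file (the route path and field name below are assumptions that must match your client code):
var multer = require('multer');
var upload = multer({ dest: './uploads/' });

// 'file' must match the field name the client uses for the upload
app.post('/api/items', upload.single('file'), function (req, res) {
  console.log(req.body); // text fields
  console.log(req.file); // uploaded file metadata (path, mimetype, size, ...)
  res.sendStatus(200);
});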