Invalid Character Base64 Firebase.Storage() - node.js

I have been trying to post an image to Firebase.Storage() for over a week now with no success. First using a blob/file which ended up not working due to Node.JS not having a blob type. Now with posting as base64. I have tried every fix I could find on here with no success. The error I am getting is:
Firebase Storage: String does not match format 'base64: Invalid character found
client side
handleImage = ( event ) => {
const target = event.target
const files = target.files
if (files.length > 0) {
const fileToLoad = files[0];
const fileReader = new FileReader();
fileReader.onload = event => {
const srcData = event.target.result; // data: base64
console.log(srcData)
let encodedImage = srcData.split(/,(.+)/)[1];
const options = {
file: encodedImage,
fileName: fileToLoad.name
}
axios.post(`${process.env.API_URL}/api/dashboard/post-image`, options)
}
fileReader.readAsDataURL(fileToLoad)
}
}
server side
router.post('/post-image', (req, res, next) => {
console.log(req.body.file)
const fileName = req.body.fileName
const message = req.body.file
const storageRef = firebase.app().storage().ref()
storageRef.child(`${fileName}`).putString(message, 'base64')
.then(snapshot => {
console.log(snapshot, 'uploaded image')
})
})
Any help would be greatly appreciated.

Related

How to get Dicom lossless image from google cloud?

This is my code to get a dicom image from Google Cloud. It works well, but the image is lossy.
router.get("/dicomWebRetrieveInstance/dcmfile", async (req, res, next) => {
const writeFile = util.promisify(fs.writeFile);
emphasized textconst fileName = 'rendered_image.jpeg';
const cloudRegion = "us";
const projectId = "neurocaredicom";
const datasetId = "System_1";
const dicomStoreId = "Site_1A";
const studyUid = "1.2.276.0.7230010.3.1.2.296485376.1.1521713579.1849134";
const seriesUid = "1.2.276.0.7230010.3.1.3.2`enter code here`96485376.1.1521713580.1849651";
const instanceUid = "1.2.276.0.7230010.3.1.4.296485376.1.1521713580.1849652";
const parent = `projects/${projectId}/locations/${cloudRegion}/datasets/${datasetId}/dicomStores/${dicomStoreId}`;
const dicomWebPath = `studies/${studyUid}/series/${seriesUid}/instances/${instanceUid}/rendered`;
const request = {parent, dicomWebPath};
const rendered =
await healthcare.projects.locations.datasets.dicomStores.studies.series.instances.retrieveRendered(
request,
{
headers: "application/octet-stream; transfer-syntax=* ",
responseType: 'arraybuffer',
}
);
const fileBytes = Buffer.from(rendered.data);
await writeFile(fileName, fileBytes);
var options = {
root: path.join(__dirname),
};
// read binary data
var bitmap = fs.readFileSync(fileName);
// convert binary data to base64 encoded string
res.status(200).sendFile(fileName, options, function (err) {
if (err) {
next(err);
} else {
console.log(
`Retrieved rendered image and saved to ${fileName} in current directory`
);
}
});
});
Any solution to that problem would be appreciated, so anyone can help.

google cloud function not uploading to bucket but no error in function

I have a NodeJS function that writes several small svg files locally and then is attempting to upload those files to cloud bucket.
in the function log, i am only seeing the message that file written to local disk. now will upload. But there is no file in the bucket and no error logged anywhere. i have made sure the timeout is set to 9 min (max) so i am sure its not timing out. what else shoudl i check?
any pointers will be appreciated.
exports.createQRCode = functions.storage.object().onFinalize(async (object) =>{
const qrcodeMonkeyKey = functions.config().qrcodemonkey.key;
//console.log(`key for qrcode monkey is ${qrcodeMonkeyKey}`);
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
console.log(fileBucket);
console.log(filePath);
if(!filePath.toLowerCase().endsWith('.csv'))
return console.log('not a csv so no need to anything fancy');
const bucket = admin.storage().bucket(fileBucket);
const filePathComps = filePath.split('/');
const folderName = filePathComps[filePathComps.length-3];
if(folderName !== "qrcode")
return console.log('not a qr code csv so no need to anything fancy');
const fileName = filePathComps[filePathComps.length-1];
console.log(fileName);
const path = require('path');
const os = require('os');
const fs = require('fs');
const tempFilePath = path.join(os.tmpdir(), fileName);
const metadata = {
contentType: contentType,
};
await bucket.file(filePath).download({destination: tempFilePath});
const csv = require('csv-parser')
const results = [];
fs.createReadStream(tempFilePath)
.pipe(csv({headers:
['uri','filename','foldername']
,skipLines:1
}))
.on('data', async (data) => {
const x = data;
results.push(data);
//results.push({id:x.id,phoneNumber:x.phoneNumber,isInternational:x.isInternational,message:x.messageText,respStatus:resp.status,responsedata:resp.data});
})
.on('end',async () => {
pArray = [];
results.forEach(x =>{
pArray.push(createQRCodeAndUpload(qrcodeMonkeyKey,x.filename,x.uri,x.foldername));
});
const finaloutput = await Promise.all(pArray);
console.log(JSON.stringify(finaloutput));
return;
});
});
const createQRCodeAndUpload = async (qrcodeMonkeyKey,fileName, url,foldername) =>{
const bucket = admin.storage().bucket('vmallapp.appspot.com');
const path = require('path');
const os = require('os');
const fs = require('fs');
var axios = require("axios").default;
console.log('processing ' + url);
if(url !==""){
const dataToSend = {
data : url,
config :{
body:'circle',
eye:'frame14',
eyeBall:'ball16',
bodyColor:"#032b5c",
bgColor:"#84d4e2",
"logo":"ae600e1267b9e477f0b635b60ffaec1d1c18d93b.png"
},
size:1200,
download:false,
file:'svg',
gradientOnEyes:true
}
var options = {
method: 'POST',
url: 'https://qrcode-monkey.p.rapidapi.com/qr/custom',
headers: {
'content-type': 'application/json',
'x-rapidapi-host': 'qrcode-monkey.p.rapidapi.com',
'x-rapidapi-key': qrcodeMonkeyKey
},
data: dataToSend
};
var response = await axios.request(options);
console.log('qrcode monkey returned status' + response.status);
const outputFilePath = path.join(os.tmpdir(), `${fileName}.svg`);
fs.writeFileSync(outputFilePath, response.data);
console.log(`${fileName}.svg written to local disk. now will upload`);
try{
await bucket.upload(outputFilePath, {
destination: `qrcode/output/${fileName}.svg`
});
}catch(error){
console.log('error in uploding ' + error);
}
console.log('lets delete the file now and clean up local storage');
fs.unlinkSync(outputFilePath);
return 'all done';
}
}

how to save excel file converted to base64 in directory with xlsx?

I am trying to save an excel file that I get from a post as base64, this conversion is done in my view, once I convert it, I try to save it with the xlsx library, this file is saving fine but when opening the file, it does not contain nothing. Can someone help me, in knowing what I am doing wrong?
my following code is:
private async getCurp(req: Request, res: Response) {
var datos = [];
let arrayusers = {};
const {
curp
} = req.body;
const newCurp = new CurpModel({
curp
});
const path = "C:\\Miroute"
var bufferFile = Buffer.from(curp, "base64");
const data = XLSX.read(bufferFile, { type: 'buffer' })
XLSX.writeFile(data, "excel.xls");
try {
return Ok<CurpType>(res, newCurp);
}
catch (error) {
console.log(error);
return ServerError(res);
}
In my component I convert my excel file to base64 in this way, iam using react
handleFileChange = e => {
let idCardBase64 = '';
this.getBase64(e.target.files[0], (result) => {
idCardBase64 = result;
console.log(idCardBase64)
this.setState({
file: idCardBase64,
})
});
}
getBase64(file, cb) {
let reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = function () {
cb(reader.result)
};
reader.onerror = function (error) {
console.log('Error: ', error);
};
}
Please can someone help me?
My solution: I had not realized that my input parameter had a different name so it was undefined, the code looked like this:
private async getCurp(req: Request, res:Response){
var datos = [];
let arrayusers = {};
const {
file
} = req.body;
const newCurp = new CurpModel({
file
});
const bufferExcel = Buffer.from(newCurp.file.toString().replace("data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64", ""),'base64');
const workbook = XLSX.read(bufferExcel, { type: 'buffer' });
const sheetNamesList = workbook.SheetNames;
// parse excel data to json
const excelData = XLSX.utils.sheet_to_json(workbook.Sheets[sheetNamesList[0]]);
console.log(excelData);
try{
return Ok<CurpType>(res, newCurp);
}
catch(error){
console.log(error);
return ServerError(res);
}
}

How to chain writeFile() and OCR with NodeJS in Google Cloud Functions?

The scenario is as follows:
From an Amazon S3 bucket a file is fetched, then it is stored in a temporary folder and then Object Character Recognition is to be performed using the API.
Unfortunately, this doesn't work, I think it's due to the asynchronous/synchronous execution, but I've already tried several variants with callbacks/promises and didn't get any further.
If someone can give me a hint on how to construct this scenario I would be grateful!
The current error is:
TypeError: Cannot read property 'writeFile' of undefined at Response.<anonymous> (/srv/index.js:38:32) (it's the 'await fs.writeFile(dir,data);' line)
/**
* Responds to any HTTP request.
*
* #param {!express:Request} req HTTP request context.
* #param {!express:Response} res HTTP response context.
*/
const AWS = require('aws-sdk');
const fs = require('fs').promises;
const Vision = require('#google-cloud/vision');
var os = require('os');
exports.helloWorld = async (req,res) => {
var bucket, fileName, fileUrl;
req.on('data', chunk => {
body += chunk.toString();
data.push(chunk);
});
req.on('end', () => {
bucket = JSON.parse(data).value1;
fileName = JSON.parse(data).value2;
fileUrl = JSON.parse(data).value3;
var s3 = new AWS.S3();
s3.getObject({
Bucket: bucket,
Key: fileName
},
async function(error, data) {
if (error != null) {
console.log("Failed to retrieve an object: " + error);
} else {
console.log("Loaded " + data.ContentType + " bytes");
var tmpdir = os.tmpdir();
var dir = tmpdir+'/'+fileName;
try{
await fs.writeFile(dir,data);
const vision = new Vision.ImageAnnotatorClient();
let text;
await vision
.textDetection('/tmp/' + fileName)
.then(([detections]) => {
const annotation = detections.textAnnotations[0];
console.log(1);
text = annotation ? annotation.description : '';
console.log(`Extracted text from image (${text.length} chars)`);
console.log(1);
console.log(text);
resolve("Finished ocr successfully");
})
.catch(error =>{
console.log(error);
reject("Error with OCR");
})
}catch(error){
console.log(error);
}
}
},
);
let message = bucket + fileName + fileUrl;
res.status(200).send(message);
});
};
You're getting that error, because you're running on an older version of Node (< 10.0.0), where fs.promises is not available. That's why fs is undefined, and you're getting:
TypeError: Cannot read property 'writeFile' of undefined at Response.<anonymous> (/srv/index.js:38:32) (it's the 'await fs.writeFile(dir,data);' line)
Either use a newer version, or just promisify the code.
const { promisify } = require('util');
const fs = require('fs');
// const fs = require('fs').promises
const writeFile = promisify(fs.writeFile);
And now use writeFile instead of fs.writeFile in your code.
Aside from that, there are a few issues with your code.
req.on('data', chunk => {
body += chunk.toString();
data.push(chunk);
});
data is not defined anywhere, and it doesn't make sense to push data into an array and then running JSON.parse on that array, given the next few lines.
bucket = JSON.parse(data).value1;
fileName = JSON.parse(data).value2;
fileUrl = JSON.parse(data).value3;
Furthermore, JSON.parse should be called only once, instead of parsing the same string (which is an array in your code, and will yield an error) 3 times.
const values = JSON.parse(body); // should be body instead of data with the posted code
bucket = values.value1;
fileName = values.value2;
fileUrl = values.value3;
This can be improved greatly by just posting bucket, fileName & fileUrl in the JSON instead of valueN.
const { bucket, fileName, fileUrl } = JSON.parse(body);
The whole code can be rewritten into:
const AWS = require('aws-sdk');
const { promisify } = require('util');
const fs = require('fs');
const Vision = require('#google-cloud/vision');
const os = require('os');
const path = require('path');
const writeFile = promisify(fs.writeFile);
exports.helloWorld = async (req,res) => {
let body = '';
req.on('data', chunk => {
body += chunk.toString();
});
req.on('end', async() => {
// post { "bucket": "x", "fileName": "x", "fileUrl": "x" }
const { bucket, fileName, fileUrl } = JSON.parse(body);
var s3 = new AWS.S3();
try {
const data = await s3.getObject({
Bucket: bucket,
Key: fileName
}).promise();
const tmpdir = os.tmpdir();
const filePath = path.join(tmpdir, fileName)
await writeFile(filePath, data);
const vision = new Vision.ImageAnnotatorClient();
const [detections] = await vision.textDetection(filePath)
const annotation = detections.textAnnotations[0];
const text = annotation ? annotation.description : '';
console.log(`Extracted text from image (${text.length} chars)`);
let message = bucket + fileName + fileUrl;
res.status(200).send(message);
} catch(e) {
console.error(e);
res.status(500).send(e.message);
}
});
};
NOTE: I don't know if Vision API works like this, but I used the same logic and parameters that you're using.

Node.js Firebase Function sending Base64 image to External API

I’m using Firebase Functions with a Storage trigger in Node.js to send uploaded image data to an external API endpoint where photos are uploaded.
I’m currently taking images uploaded to a bucket in my Firebase storage, converting them to base64 strings, and plug them into my dictionary for the request.
My current issue is that seems like the dictionary is being cut short. I looked at the console logs on the Firebase console and seems like it ends after the base64 variable.
I’m not sure whether this is a bug with the syntax, or with the way I’m using the base64, or with Firebase Functions. If anyone knows what might be going on, please let me know.
const request = require('request-promise');
const gcs = require('#google-cloud/storage')();
const path = require('path');
const os = require('os');
const fs = require('fs');
const firebase = require('firebase');
exports.identifyUpdate = functions.storage.object().onFinalize((object) => {
const fileBucket = object.bucket;
const filePath = object.name;
const contentType = object.contentType;
const fileName = path.basename(filePath);
if(!filePath.substring(0,filePath.indexOf('/')) == 'updates') {
console.log("Triggered by non-update photo")
return null;
}
console.log("Update photo added")
// Create Firebase app (for Realtime Database access)
var config = {
apiKey: "[apikey]",
authDomain: "[PROJECT_ID].firebaseapp.com",
databaseURL: "https://[PROJECT_ID].firebaseio.com",
storageBucket: "[PROJECT_ID].appspot.com",
};
if(!firebase.apps.length) {
firebase.initializeApp(config);
}
// Trace back to Update stored in Realtime Database
const database = firebase.database().ref()
const pendingRef = database.child('pendingUpdates')
console.log(filePath)
const splitPath = filePath.split(path.sep)
const patientID = splitPath[1]
console.log('Patient ID: ' + patientID)
const updateID = splitPath[2]
console.log('Update ID: ' + updateID)
const updateRef = pendingRef.child(patientID).child(updateID)
console.log('Found Update reference')
const photoRef = updateRef.child('photoURLs').child(fileName)
console.log('Photo Reference: ' + photoRef)
// Download and convert image to base64
const bucket = gcs.bucket(fileBucket)
const tempFilePath = path.join(os.tmpdir(), fileName)
const metadata = {
contentType: contentType
};
var base64;
return bucket.file(filePath).download({
destination: tempFilePath
}).then(() => {
console.log('Image downloaded locally to', tempFilePath)
}).then(() => {
base64 = base64_encode(tempFilePath)
console.log("Base 64: " + base64)
}).then(() => {
// Send image data to Kairos
var options = {
method: 'POST',
uri: 'https://api.kairos.com/recognize',
body: {
'image': base64,
'gallery_name': 'gallerytest1'
},
headers: {
'app_id': '[id]',
'app_key': '[key]'
},
json: true
}
return new Promise (() => {
console.log(options)
request(options)
.then(function(repos) {
console.log('API call succeeded');
console.log('Kairos response: ' + repos);
const apiResult = repos['images']['transaction']['subject_id']
console.log("Transaction " + JSON.stringify(apiResult))
})
.catch(function(err) {
console.log('API call failed')
})
});
})
// Delete app instance (to prevent concurrency leaks)
const deleteApp = () => app.delete().catch(() => null);
deleteApp.call
})
function base64_encode(file) {
// read binary data
var bitmap = fs.readFileSync(file);
// convert binary data to base64 encoded string
return new Buffer(bitmap).toString('base64');
}
Image Output:

Resources