I need to download files for my project. This is my current code:
const fs = require("fs")
const https = require("https")
const url = "https://f52.workupload.com/download/YYHAXY6t9VK"
https.get(url, (res) => {
  // Image will be stored at this path
  const path = `${__dirname}/clayent.zip`;
  const filePath = fs.createWriteStream(path);
  res.pipe(filePath);
  filePath.on('finish', () => {
    filePath.close();
    console.log('Download Completed');
  });
});
It works with PNGs, but not with zip files. Is there a way to make this work?
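One likely cause is that the zip link answers with a redirect or an HTML page rather than the file itself, and plain https.get does not follow redirects, so whatever the server returns gets written into clayent.zip. A minimal sketch that checks the status code and follows redirects (the download helper below is hypothetical, not from the original post):
const fs = require("fs")
const https = require("https")

// Hypothetical helper: checks the status code and follows redirects before piping to disk.
function download(url, dest, redirectsLeft = 5) {
  https.get(url, (res) => {
    // Follow 3xx responses instead of writing the redirect page into the .zip file.
    if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location && redirectsLeft > 0) {
      res.resume() // discard the redirect body
      const next = new URL(res.headers.location, url).toString()
      return download(next, dest, redirectsLeft - 1)
    }
    if (res.statusCode !== 200) {
      res.resume()
      return console.error(`Download failed with status ${res.statusCode}`)
    }
    const file = fs.createWriteStream(dest)
    res.pipe(file)
    file.on('finish', () => {
      file.close()
      console.log('Download Completed')
    })
  })
}

download("https://f52.workupload.com/download/YYHAXY6t9VK", `${__dirname}/clayent.zip`)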
I'm playing around with Google Vision AutoML in React and I'm trying to figure out a way to change fs.readFileSync to read from an online URL. I've used https, request, and even url in Node.js, but I still can't figure out a way to make it read from a URL source. Any help is appreciated, thank you.
const projectId = "abc"
const location = "abc"
const modelId = "abc"
const filePath = "./test-pics/7.jpg"

const { PredictionServiceClient } = require("@google-cloud/automl").v1
const https = require("https")
const request = require("request")
const url = require("node:url")
const fs = require("fs")

const client = new PredictionServiceClient({
  keyFilename: "./vision-private-key.json",
})

const imageURL = "https://cdn.filestackcontent.com/IJ0kViHQQyiwQTVaFH66"

// Read the file content for translation.
const content = fs.readFileSync(filePath)
const content1 = https.get(imageURL)
const content2 = request.get(
  "https://cdn.filestackcontent.com/IJ0kViHQQyiwQTVaFH66"
)

async function predict() {
  const request = {
    name: client.modelPath(projectId, location, modelId),
    payload: {
      image: {
        imageBytes: content,
      },
    },
  }
  const [response] = await client.predict(request)
  for (const annotationPayload of response.payload) {
    console.log(`type of vehicle: ${annotationPayload.displayName}`)
    const carType = annotationPayload.displayName
    console.log(carType)
  }
}

predict()
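One possible approach is to download the image bytes into a Buffer first and pass that as imageBytes, since https.get only starts the request and does not return the body. A sketch assuming the client, projectId, location, modelId and imageURL defined above (fetchImageBuffer and predictFromUrl are hypothetical names):
const https = require("https")

// Hypothetical helper: downloads a URL into a Buffer so it can be passed as imageBytes.
function fetchImageBuffer(imageURL) {
  return new Promise((resolve, reject) => {
    https.get(imageURL, (res) => {
      if (res.statusCode !== 200) {
        res.resume()
        return reject(new Error(`Request failed with status ${res.statusCode}`))
      }
      const chunks = []
      res.on("data", (chunk) => chunks.push(chunk))
      res.on("end", () => resolve(Buffer.concat(chunks)))
    }).on("error", reject)
  })
}

async function predictFromUrl() {
  const content = await fetchImageBuffer(imageURL)
  const request = {
    name: client.modelPath(projectId, location, modelId),
    payload: { image: { imageBytes: content } },
  }
  const [response] = await client.predict(request)
  for (const annotationPayload of response.payload) {
    console.log(`type of vehicle: ${annotationPayload.displayName}`)
  }
}

predictFromUrl().catch(console.error)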
I am trying to upload a file onto IPFS.
My code is as below:
var express = require('express')
var fs = require('fs')
const { create, globSource } = require('ipfs-http-client')
const ipfs = create()

....

const filesAdded = await ipfs.add(
  { path: fileName, content: file },
  {
    progress: (len) => console.log('Uploading file...' + len),
  },
)
console.log(filesAdded)
const fileHash = filesAdded.cid.toString()
This code returns a hash value, but I can't see my file (an image) at https://ipfs.io/ipfs/{hash}
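For reference, a self-contained version of the upload might look like the sketch below (assuming a local IPFS daemon on the default API port; the file name is a placeholder). One common reason a file does not show up on the public gateway is that the content has not propagated from, or is not reachable on, the node it was added to, so pinning it on a reachable node or a pinning service is usually needed.
const fs = require('fs')
const { create } = require('ipfs-http-client')

// Connects to the local IPFS daemon's HTTP API (default: http://127.0.0.1:5001).
const ipfs = create()

async function upload(fileName) {
  const file = fs.readFileSync(fileName)
  const filesAdded = await ipfs.add(
    { path: fileName, content: file },
    { progress: (len) => console.log('Uploading file...' + len) },
  )
  console.log(filesAdded)
  return filesAdded.cid.toString()
}

upload('./image.png') // placeholder file name
  .then((hash) => console.log(`https://ipfs.io/ipfs/${hash}`))
  .catch(console.error)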
I am saving a base64 image using Node.js. It saves my image at the exact path, but shows me an error.
Please help me find the error.
Here is my code:
var express = require('express');
var router = express.Router();
const fs = require('fs');
const mime = require('mime');
const path = './uploads';

router.post('/register', (req, res, next) => {
  const base64Image = req.body.image;
  const matches = base64Image.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
  response = {};
  if (matches.length !== 3) {
    return new Error('Invalid input String');
  }
  response.type = matches[1];
  response.data = new Buffer(matches[2]);
  let decodedImg = response;
  let imageBuffer = decodedImg.data;
  let type = decodedImg.type;
  let extension = mime.extension(type);
  let fileName = 'image.' + extension;
  try {
    fs.writeFileSync(path + '/' + fileName, imageBuffer, 'utf8');
    return res.send({ status: 'success' });
  } catch (e) {
    next(e);
  }
  return;
});

module.exports = router;
Any solution appreciated!
The mistake you made is that when you create the buffer you are not specifying an encoding. You should create the buffer like this:
new Buffer() is deprecated; use Buffer.from() instead.
let buff = Buffer.from(m[2], 'base64'); // encoding type base64
Basic code snippet
const fs = require('fs')

let a = 'base64ImageString'
let m = a.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
let b = Buffer.from(m[2], 'base64');
fs.writeFile('image.png', b, function (err) {
  if (!err) {
    console.log("file is created")
  }
});
Also, when you write a buffer to a file you don't have to pass an encoding type, but when writing a string you do.
Check this out for a demo
https://repl.it/repls/GrimOfficialLocations
But it is not advised to send an image as a base64 string. It is inefficient for large images: base64 takes roughly 33% more bits than its binary equivalent. I recommend you check this out:
Upload base64 image with Ajax
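Applied to the /register route above, only two lines need to change (a sketch of just the relevant lines, not the full handler):
// Decode the base64 payload explicitly instead of treating it as plain text.
response.data = Buffer.from(matches[2], 'base64');

// Writing a Buffer: no encoding argument is needed.
fs.writeFileSync(path + '/' + fileName, imageBuffer);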
I need to tweak the following code so it copies the whole of sourceFolder instead of just the event file itself, but I'm not sure how to do this.
const {Storage} = require('@google-cloud/storage');
const path = require('path');

exports.copyRenders = (event, context) => {
  const gcsEvent = event;
  const sourcePathOnly = gcsEvent.name
  const folderToWatch = ' ... folder on source bucket... '
  // Process only if it's in the correct folder
  if (sourcePathOnly.indexOf(folderToWatch) > -1) {
    const storage = new Storage();
    const sourceFileBucket = gcsEvent.bucket
    const sourceFolder = sourcePathOnly.split('/').slice(-2)
    const destFileBucket = 'trans-test'
    storage
      .bucket(sourceFileBucket)
      .file(sourcePathOnly)
      .copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
  }
  console.log(`Processing file: ${sourcePathOnly}`);
}
With the new code from the answer below:
const {Storage} = require('@google-cloud/storage');
const path = require('path');

exports.copyRenders = (event, context) => {
  const gcsEvent = event;
  const sourcePathOnly = gcsEvent.name
  const folderToWatch = ' ... folder on source bucket... '
  // Process only if it's in the correct folder
  if (sourcePathOnly.indexOf(folderToWatch) > -1) {
    const storage = new Storage();
    const sourceFileBucket = gcsEvent.bucket
    const sourceFolder = sourcePathOnly.split('/').slice(-2)
    const destFileBucket = 'trans-test'
    const options = {
      // Get the source path without the file name
      prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf("/")),
    };
    const [files] = storage.bucket(sourceFileBucket).getFiles(options);
    files.forEach(file => {
      file.copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
    });
  }
  console.log(`Processing file: ${sourcePathOnly}`);
}
You can call getFiles() to build a list of files in your directory, and then copy the files. Something like this (you may have to modify for your scenario):
const [ files ] = await storage.bucket(sourceFileBucket).getFiles({
  autoPaginate: false,
  prefix: sourceFolder
});

files.forEach(file => file.copy( ... ));
You can copy all the files of a directory prefix:
const options = {
  // Get the source path without the file name
  prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf("/")),
};

// getFiles() returns a promise, so the enclosing function needs to be async.
const [files] = await storage.bucket(sourceFileBucket).getFiles(options);
files.forEach(file => {
  file.copy(storage.bucket(destFileBucket).file(sourceFolder[0] + '/' + sourceFolder[1]));
});
Be careful: each time your function is triggered, everything that matches the prefix will be copied. And if you copy into the same bucket, you can create an exponential infinite loop!
Note: mathematically, I don't know if something infinite can be exponential!!
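Putting the pieces together, the handler needs to be async so that getFiles() can be awaited, and each file needs its own destination name so the copies don't overwrite each other. A rough sketch under those assumptions (the per-file destination naming is a guess at the intended behaviour, not from the original code):
const { Storage } = require('@google-cloud/storage');

exports.copyRenders = async (event, context) => {
  const gcsEvent = event;
  const sourcePathOnly = gcsEvent.name;
  const folderToWatch = ' ... folder on source bucket... ';
  // Process only if it's in the correct folder
  if (sourcePathOnly.indexOf(folderToWatch) > -1) {
    const storage = new Storage();
    const sourceFileBucket = gcsEvent.bucket;
    const destFileBucket = 'trans-test';
    // List every object that shares the event file's folder prefix.
    const [files] = await storage.bucket(sourceFileBucket).getFiles({
      prefix: sourcePathOnly.slice(0, sourcePathOnly.lastIndexOf('/')),
    });
    await Promise.all(
      files.map((file) => {
        // Keep the last two path segments of each file so copies keep distinct names.
        const destName = file.name.split('/').slice(-2).join('/');
        return file.copy(storage.bucket(destFileBucket).file(destName));
      }),
    );
  }
  console.log(`Processing file: ${sourcePathOnly}`);
};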
I'm trying to grab some HD images from URLs, resize them and upload them to storage.
So far, I've gotten the image and resized it using sharp. The output API of sharp uses .toFile('output.jpg') or .toBuffer(), and I'm not sure how to proceed from here. What would be the easiest way to output the image and upload it to Firebase Storage?
My code so far:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const request = require('request').defaults({ encoding: null });
const sharp = require('sharp');
exports.copyThumbnailImage = functions.firestore.document('users/{userId}/vocab/{vocabId}').onCreate((snapshot, context) => {
  // mock: copyThumbnailImage({ chosenImages: [{ googleThumbnailUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcQlC7Vnu9CuZlA-nTpW8TLPd8dAE456LCpeXoadUKHoxB7WEmM1rfahqsfr", mime: "image/jpeg", originalUrl: "https://storage.tenki.jp/storage/static-images/suppl/article/image/2/27/278/27810/1/large.jpg" }] }, { params: { userId: 'zYCw6DmcEiOS8Yk4QltYPikykwP2', vocabId: 1420970 } })
  const data = snapshot.data()
  const vocabId = context.params.vocabId
  const images = data.chosenImages
  const checkExistencePromises = []
  // Promises check if image ref already exists in firestore
  images.forEach(imageObj => {
    checkExistencePromises.push(db.collection('userImages').where('originalUrl', '==', imageObj.originalUrl).where('vocabId', '==', vocabId).get())
  })
  return Promise.all(checkExistencePromises)
    .then(snapshots => {
      const getImagePromises = []
      snapshots.forEach((snapshot, i) => {
        if (snapshot.empty) {
          // if image doesn't exist already, upload thumbnail to DB, add doc to userImages and add link to review
          const originalUrl = images[i].originalUrl
          getImagePromises.push(getImage(originalUrl))
        } else {
          // add link to review
        }
      })
      return Promise.all(getImagePromises)
    })
    .then(responses => {
      responses.forEach(response => {
        sharp(response).resize(200, 200).toBuffer()
        // ????????
      })
    })
    .then(() => {
    })
    .catch(error => {
      console.log(error)
    })
})

function getImage (url) {
  return new Promise((resolve, reject) => {
    request.get(url, (err, res, body) => {
      if (err) reject(err)
      resolve(body)
    })
  })
}
You can save it to the local file system (the local /tmp disk) and upload it to Cloud Storage from there.
Have a look at this official Cloud Functions sample: https://github.com/firebase/functions-samples/blob/master/convert-images/functions/index.js. (I copy the code below for future reference.)
In particular, look at how they save a temporary file with
return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
and how they upload it with:
return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
In your case, instead of calling spawn() you would call
.toFile(-theTemporaryFileName-)
Finally, have a look at Write temporary files from Google Cloud Function and Attach firebase cloud function or cache its data from cloud function call about the /tmp disk.
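For this question specifically, the sharp step can take the place of the ImageMagick conversion. A minimal sketch under that assumption (resizeAndUpload and destinationPath are hypothetical names; sharp and admin are the modules already required in the question's snippet):
const os = require('os');
const path = require('path');
const fs = require('fs');

// Hypothetical helper: resize a Buffer with sharp, write it to the /tmp disk,
// upload it to the default Storage bucket, then clean up.
async function resizeAndUpload(imageBuffer, destinationPath) {
  const tempLocalFile = path.join(os.tmpdir(), path.basename(destinationPath));
  await sharp(imageBuffer).resize(200, 200).toFile(tempLocalFile);
  await admin.storage().bucket().upload(tempLocalFile, { destination: destinationPath });
  fs.unlinkSync(tempLocalFile);
}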
Code from the Cloud Function Sample as of 08/01/2018 (link above)
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
const gcs = require('@google-cloud/storage')();
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');
// File extension for the created JPEG files.
const JPEG_EXTENSION = '.jpg';
/**
* When an image is uploaded in the Storage bucket it is converted to JPEG automatically using
* ImageMagick.
*/
exports.imageToJPG = functions.storage.object().onFinalize((object) => {
  const filePath = object.name;
  const baseFileName = path.basename(filePath, path.extname(filePath));
  const fileDir = path.dirname(filePath);
  const JPEGFilePath = path.normalize(path.format({dir: fileDir, name: baseFileName, ext: JPEG_EXTENSION}));
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  const tempLocalDir = path.dirname(tempLocalFile);
  const tempLocalJPEGFile = path.join(os.tmpdir(), JPEGFilePath);

  // Exit if this is triggered on a file that is not an image.
  if (!object.contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }

  // Exit if the image is already a JPEG.
  if (object.contentType.startsWith('image/jpeg')) {
    console.log('Already a JPEG.');
    return null;
  }

  const bucket = gcs.bucket(object.bucket);
  // Create the temp directory where the storage file will be downloaded.
  return mkdirp(tempLocalDir).then(() => {
    // Download file from bucket.
    return bucket.file(filePath).download({destination: tempLocalFile});
  }).then(() => {
    console.log('The file has been downloaded to', tempLocalFile);
    // Convert the image to JPEG using ImageMagick.
    return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
  }).then(() => {
    console.log('JPEG image created at', tempLocalJPEGFile);
    // Uploading the JPEG image.
    return bucket.upload(tempLocalJPEGFile, {destination: JPEGFilePath});
  }).then(() => {
    console.log('JPEG image uploaded to Storage at', JPEGFilePath);
    // Once the image has been converted delete the local files to free up disk space.
    fs.unlinkSync(tempLocalJPEGFile);
    fs.unlinkSync(tempLocalFile);
    return;
  });
});