I'm trying to upload a file to Azure Blob Storage using Node.js, but I'm running into a promise issue. Below is the code; it fails while uploading a zip file to Azure Blob Storage. I need to convert it to use a callback or a .then() chain.
var re = /\.zip/;
fs.readdir("/tmp/", function(err, files) {
    if (err) {
        console.log("Could not list the directory.", err)
        process.exit(1)
    }
    console.log("files", files)
    var matches = files.filter(function(text) { return re.test(text) })
    console.log("These are the files you have", matches)
    var numFiles = matches.length
    console.log(numFiles);
    if (numFiles) {
        // Upload each matching zip file to blob storage
        for (let i = 0; i < numFiles; i++) {
            uploadFileToBlob(matches[i])
                .then((result: any) => {
                    console.log('result');
                    console.log(result);
                })
                .catch((error: any) => {
                    console.log(error);
                });
        }
    }
})
const uploadFileToBlob = async (file: any) => {
    const { AbortController } = require("@azure/abort-controller");
    const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");
    const account = 'string';
    const accountSas = "sastoken";
    const localFilePath = file;
    const pipeline = newPipeline(new AnonymousCredential(), {
        // httpClient: MyHTTPClient, // A customized HTTP client implementing the IHttpClient interface
        retryOptions: { maxTries: 4 }, // Retry options
        userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
        keepAliveOptions: {
            // Keep-alive is enabled by default; disable it by setting false
            enable: false
        }
    });
    const blobServiceClient = new BlobServiceClient(
        `https://${account}.blob.core.windows.net${accountSas}`,
        pipeline
    );
    const containerName = 'quickstart';
    const containerClient = blobServiceClient.getContainerClient(containerName);
    const blobName = file;
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    console.log("blockBlobClient", blockBlobClient);
    try {
        const result = await blockBlobClient.uploadFile(localFilePath, {
            blockSize: 4 * 1024 * 1024, // 4 MB block size
            concurrency: 20, // 20 concurrent block uploads
            onProgress: (ev: any) => console.log(ev)
        });
        console.log("uploadFile succeeds");
        return result;
    } catch (err) {
        console.log(
            `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
        );
        throw err;
    }
}
The output I get is:
Promise { <pending> }
I tried writing the code with a callback and with .then(), but I get the same result, and the same code works on my local machine.
The upload result needs to be returned from uploadFileToBlob so that the caller's .then() receives it. Please change the following lines of code:
await blockBlobClient.uploadFile(localFilePath, {
    blockSize: 4 * 1024 * 1024, // 4MB block size
    concurrency: 20, // 20 concurrency
    onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
to
const result = await blockBlobClient.uploadFile(localFilePath, {
    blockSize: 4 * 1024 * 1024, // 4MB block size
    concurrency: 20, // 20 concurrency
    onProgress: (ev) => console.log(ev)
});
console.log("uploadFile succeeds");
return result;
And then call your uploadFileToBlob method like:
uploadFileToBlob('filename.zip')
    .then((result) => {
        console.log('result');
        console.log(result);
    })
    .catch((error) => {
        console.log(error);
    });
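If you also need to know when all of the matching zip files have finished, you could replace the for loop inside the readdir callback with a single Promise.all. A minimal sketch, reusing the matches array and uploadFileToBlob from the question:

Promise.all(matches.map(file => uploadFileToBlob(file)))
    .then((results) => {
        // Resolves once every upload has succeeded
        console.log(`All ${results.length} uploads finished`);
    })
    .catch((error) => {
        // Rejects as soon as any single upload fails
        console.log(error);
    });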
UPDATE
Here's the complete code I used. I was able to upload the file successfully:
const uploadFileToBlob = async (file) => {
    const { AbortController } = require("@azure/abort-controller");
    const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");
    const account = 'account-name';
    const accountSas = '?sv=2020-04-08&ss=b&srt=sco&se=2021-03-08T18%3A30%3A00Z&sp=rwdxftlacup&sig=pT1d1NJQdu3bcnPA37voTCyQ6jKnmNVYgo1wyiYspDc%3D';
    const localFilePath = file;
    const pipeline = newPipeline(new AnonymousCredential(), {
        // httpClient: MyHTTPClient, // A customized HTTP client implementing the IHttpClient interface
        retryOptions: { maxTries: 4 }, // Retry options
        userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
        keepAliveOptions: {
            // Keep-alive is enabled by default; disable it by setting false
            enable: false
        }
    });
    const blobServiceClient = new BlobServiceClient(
        `https://${account}.blob.core.windows.net${accountSas}`,
        pipeline
    );
    const containerName = 'container-name';
    const containerClient = blobServiceClient.getContainerClient(containerName);
    const blobName = file;
    const blockBlobClient = containerClient.getBlockBlobClient(blobName);
    console.log("blockBlobClient", blockBlobClient);
    try {
        const result = await blockBlobClient.uploadFile(localFilePath, {
            blockSize: 4 * 1024 * 1024, // 4MB block size
            concurrency: 20, // 20 concurrency
            onProgress: (ev) => console.log(ev)
        });
        console.log("uploadFile succeeds");
        return result;
    } catch (err) {
        console.log(
            `uploadFile failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
        );
        throw err;
    }
}
uploadFileToBlob('filename.zip')
    .then((result) => {
        console.log('result');
        console.log(result);
    })
    .catch((error) => {
        console.log(error);
    });
Related
The files upload to Firebase Cloud Storage, but the download URLs are never returned into my array. I want to return, or set into an array, the download URL of each uploaded file. How can I solve this problem?
const uploadImage = () => {
    var promises = uploadUrl.map(async (image, index) => {
        let filename = image.substring(image.lastIndexOf('/') + 1);
        const task = storage().ref(`complaintPhotos/${filename}`).putFile(image);
        // promises.push(task);
        task.on('state_changed', taskSnapshot => {
            console.log(
                `${taskSnapshot.bytesTransferred} transferred out of ${taskSnapshot.totalBytes}`,
            );
        });
        try {
            await task;
            await storage()
                .ref(`complaintPhotos/${filename}`)
                .getDownloadURL()
                .then(url => {
                    setUploadUri(prevState => [...prevState, url]);
                });
        } catch (e) {
            console.log(e);
        }
    });
};
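One likely cause: uploadImage never waits for the promises that map produces, so anything reading the state right after the call sees it before any URL has been added. A sketch of one way to fix it, assuming uploadUrl holds local file paths and setUploadUri is the state setter from the code above; Promise.all collects every download URL and then performs a single state update:

const uploadImage = async () => {
    const urls = await Promise.all(
        uploadUrl.map(async image => {
            const filename = image.substring(image.lastIndexOf('/') + 1);
            const ref = storage().ref(`complaintPhotos/${filename}`);
            await ref.putFile(image); // wait for the upload to finish
            return ref.getDownloadURL(); // resolve with this file's URL
        })
    );
    // One state update with all URLs, instead of one per file
    setUploadUri(prevState => [...prevState, ...urls]);
};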
Below is my working code, which can post images, but is there any way I can also share videos as an Instagram story?
This is the error I get when I try to post a video instead of an image:
PS D:\Softwares\programming\Insta Bot\story> node index.js
18:45:11 - info: Dry Run Activated
18:45:11 - info: Post() called! ======================
18:45:11 - debug: 1 files found in ./images/
18:45:11 - warn: Record file not found, saying yes to D:\Softwares\programming\Insta Bot\story\images\meme.mp4
18:45:11 - debug: Read File Success
18:45:11 - error: undefined
(MAIN CODE)
index.js
const logger = require("./logger.js")
const { random, sleep } = require('./utils')
require('dotenv').config();
const { IgApiClient, IgLoginTwoFactorRequiredError } = require("instagram-private-api");
const ig = new IgApiClient();
const Bluebird = require('bluebird');
const inquirer = require('inquirer');
const { CronJob } = require('cron');
const path = require("path");
const fs = require("fs");
const fsp = fs.promises;
const sharp = require("sharp");
//==================================================================================
const statePath = "./etc/state.conf";
const recordPath = "./etc/usedfiles.jsonl";
const imgFolderPath = "./images/";
const dryrun = true;
const runOnStart = true;
//==================================================================================
(async () => { // FOR AWAIT
    // LOGIN TO INSTAGRAM
    if (!dryrun) {
        await login();
        logger.info("Log In Successful");
    } else {
        logger.info("Dry Run Activated");
    }

    // SCHEDULER
    // logger.silly("I'm a schedule, and I'm running!! :)");
    const job = new CronJob('38 43 * * * *', post, null, true); // https://crontab.guru/
    if (!runOnStart) logger.info(`Next few posts scheduled for: \n${job.nextDates(3).join("\n")}\n`);
    else post();

    // MAIN POST COMMAND
    async function post() {
        logger.info("Post() called! ======================");
        let postPromise = fsp.readdir(imgFolderPath)
            .then(filenames => {
                if (filenames.length < 1) throw new Error(`Folder ${imgFolderPath} is empty...`)
                logger.debug(`${filenames.length} files found in ${imgFolderPath}`);
                return filenames;
            })
            .then(filenames => filenames.map(file => path.resolve(imgFolderPath + file)))
            .then(filenames => pickUnusedFileFrom(filenames, filenames.length))
            .then(filename => {
                if (!dryrun) registerFileUsed(filename)
                return filename
            })
            .then(fsp.readFile)
            .then(async buffer => {
                logger.debug("Read File Success"); // TODO move this to previous then?
                return sharp(buffer).jpeg().toBuffer()
                    .then(file => {
                        logger.debug("Sharp JPEG Success");
                        return file
                    })
            })
            .then(async file => {
                if (!dryrun) {
                    // await sleep(random(1000, 60000)) // TODO is this necessary?
                    return ig.publish.story({ file })
                        .then(fb => logger.info("Posting successful!?"))
                }
                else return logger.info("Data not sent, dryrun = true")
            })
            .then(() => logger.info(`Next post scheduled for ${job.nextDates()}\n`))
            .catch(logger.error)
    }
})();
//=================================================================================
async function login() {
    ig.state.generateDevice(process.env.IG_USERNAME);
    // ig.state.proxyUrl = process.env.IG_PROXY;

    // register callback?
    ig.request.end$.subscribe(async () => {
        const serialized = await ig.state.serialize();
        delete serialized.constants; // this deletes the version info, so you'll always use the version provided by the library
        await stateSave(serialized);
    });

    if (await stateExists()) {
        // import state accepts both a string as well as an object
        // the string should be a JSON object
        const stateObj = await stateLoad();
        await ig.state.deserialize(stateObj)
            .catch(err => logger.debug("deserialize: " + err));
    } else {
        let standardLogin = async function() {
            // login like normal
            await ig.simulate.preLoginFlow();
            logger.debug("preLoginFlow finished");
            await ig.account.login(process.env.IG_USERNAME, process.env.IG_PASSWORD);
            logger.info("Logged in as " + process.env.IG_USERNAME);
            process.nextTick(async () => await ig.simulate.postLoginFlow());
            logger.debug("postLoginFlow finished");
        }

        // Perform usual login
        // If 2FA is enabled, IgLoginTwoFactorRequiredError will be thrown
        return Bluebird.try(standardLogin)
            .catch(
                IgLoginTwoFactorRequiredError,
                async err => {
                    logger.info("Two Factor Auth Required");
                    const { username, totp_two_factor_on, two_factor_identifier } = err.response.body.two_factor_info;
                    // decide which method to use
                    const verificationMethod = totp_two_factor_on ? '0' : '1'; // default to 1 for SMS
                    // At this point a code should have been sent
                    // Get the code
                    const { code } = await inquirer.prompt([
                        {
                            type: 'input',
                            name: 'code',
                            message: `Enter code received via ${verificationMethod === '1' ? 'SMS' : 'TOTP'}`,
                        },
                    ]);
                    // Use the code to finish the login process
                    return ig.account.twoFactorLogin({
                        username,
                        verificationCode: code,
                        twoFactorIdentifier: two_factor_identifier,
                        verificationMethod, // '1' = SMS (default), '0' = TOTP (google auth for example)
                        trustThisDevice: '1', // Can be omitted as '1' is used by default
                    });
                },
            )
            .catch(e => logger.error('An error occurred while processing two factor auth', e, e.stack));
    }
    return

    //================================================================================
    async function stateSave(data) {
        // here you would save it to a file/database etc.
        await fsp.mkdir(path.dirname(statePath), { recursive: true }).catch(logger.error);
        return fsp.writeFile(statePath, JSON.stringify(data))
            // .then(() => logger.info('state saved, daddy-o'))
            .catch(err => logger.error("Write error" + err));
    }

    async function stateExists() {
        return fsp.access(statePath, fs.constants.F_OK)
            .then(() => {
                logger.debug('Can access state info')
                return true
            })
            .catch(() => {
                logger.warn('Cannot access state info')
                return false
            });
    }

    async function stateLoad() {
        // here you would load the data
        return fsp.readFile(statePath, 'utf-8')
            .then(data => JSON.parse(data))
            .then(data => {
                logger.info("State load successful");
                return data
            })
            .catch(logger.error)
    }
}
async function registerFileUsed(filepath) {
    let data = JSON.stringify({
        path: filepath,
        time: new Date().toISOString()
    }) + '\n';
    return fsp.appendFile(recordPath, data, { encoding: 'utf8', flag: 'a+' })
        .then(() => {
            logger.debug("Writing filename to record file");
            return filepath
        })
}

function pickUnusedFileFrom(filenames, iMax = 1000) {
    return new Promise((resolve, reject) => {
        let checkFileUsed = async function(filepath) {
            return fsp.readFile(recordPath, 'utf8')
                .then(data => data.split('\n'))
                .then(arr => arr.filter(Boolean))
                .then(arr => arr.map(JSON.parse))
                .then(arr => arr.some(entry => entry.path === filepath))
        }

        let trythis = function(iMax, i = 1) {
            let file = random(filenames);
            checkFileUsed(file)
                .then(async used => {
                    if (!used) {
                        logger.info(`Unused file found! ${file}`);
                        resolve(file);
                    } else if (i < iMax) {
                        logger.debug(`Try #${i}: File ${file} used already`);
                        await sleep(50);
                        trythis(iMax, ++i)
                    } else {
                        reject(`I tried ${iMax} times and all the files I tried were previously used`)
                    }
                })
                .catch(err => {
                    logger.warn("Record file not found, saying yes to " + file);
                    resolve(file);
                })
        }(iMax);
    })
}
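Two things stand out here. First, every file is piped through sharp(buffer).jpeg(), and sharp only decodes images, so an .mp4 buffer makes that step reject; the rejection falls through to .catch(logger.error), which is the "18:45:11 - error: undefined" line. Second, instagram-private-api publishes video stories with a different options shape from photo stories: as far as I can tell it expects a video buffer plus a coverImage buffer, though you should verify this against the library's docs. A sketch of how the middle of the post() chain could branch on file type; filename, fsp, path, sharp, ig, dryrun, and logger all come from the code above, and the cover-image path is a hypothetical stand-in:

.then(async filename => {
    const buffer = await fsp.readFile(filename);
    logger.debug("Read File Success");
    if (path.extname(filename).toLowerCase() === ".mp4") {
        // Video story: skip sharp entirely, it cannot decode video.
        // A cover image appears to be required for video stories.
        const coverImage = await fsp.readFile("./images/cover.jpg"); // stand-in path
        return { video: buffer, coverImage };
    }
    // Photo story: normalize to JPEG as before
    const file = await sharp(buffer).jpeg().toBuffer();
    logger.debug("Sharp JPEG Success");
    return { file };
})
.then(async options => {
    if (dryrun) return logger.info("Data not sent, dryrun = true");
    return ig.publish.story(options)
        .then(() => logger.info("Posting successful!?"));
})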
Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded, and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
Kuzzle,
WebSocket
} = require('kuzzle-sdk');
var start = new Date();
const webSocketOptionsObject = {
    "autoReconnect": true,
    "ssl": true,
    "port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};

// check file exists
fs.access(path, fs.F_OK, (err) => {
    if (err) {
        console.error(err)
        return
    }
    fileData = new fileObject.LocalFileData(path);
    // Adds a listener to detect connection problems
    kuzzle.on('networkError', error => {
        console.error('Network Error:', error);
    });
});
const connectToKuzzle = async () => {
    // Connects to the Kuzzle server
    await kuzzle.connect();
    return await kuzzle.auth.login('local', credentials);
    // console.log('jwt auth token: ', jwt);
}

const disConnectFromKuzzle = async () => {
    console.log('Disconnected from Kuzzle');
    kuzzle.disconnect();
    var time = new Date() - start;
    // sec = Math.floor((time/1000) % 60);
    console.log('Execution time in milliseconds: ', time);
}

const presignedURL = async () => {
    // Get a pre-signed URL
    const result = await kuzzle.query({
        controller: 's3/upload',
        action: 'getUrl',
        uploadDir: 'proxybucket', // directory name inside the bucket specified in the s3 plugin bucket name
        filename: fileData.name
    });
    console.log("result: ", result);
    return result;
}

const loadFileStream = async () => {
    console.log('getting file: ', path);
    targetFile = null;
    await fs.promises.readFile(path)
        .then(function(result) {
            console.log("file loaded------", result.length);
            targetFile = result;
        })
        .catch(function(error) {
            console.log(error);
            return;
        });
    return targetFile;
}

const kuzzleValidate = async (kuzzleResource) => {
    // console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
    // Validate and persist a previously uploaded file.
    // https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
    const Presult = await kuzzle.query({
        // Kuzzle API params
        "controller": "s3/upload",
        "action": "validate",
        // File key in S3 bucket
        "fileKey": kuzzleResource.result.fileKey
    });
    console.log('validate: ', Presult.result.fileUrl);
}

const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
    // options at https://github.com/axios/axios
    const axiosOptions = {
        headers: {
            'Content-Type': fileData.type
        },
        maxBodyLength: 200000000 // 200,000,000 bytes = 200 MB
    };
    // PUT the fileBuffer to the Kuzzle S3 endpoint
    // https://github.com/axios/axios
    axios.defaults.headers.common['Authorization'] = jwt;
    const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
        .then((response) => {
            console.log('file uploaded......');
        })
        .catch(function(error) {
            console.log("File upload error: ", error);
            return;
        });
    return "Upload successful";
}

if (fileData) {
    connectToKuzzle().then((jwt) => {
        console.log(jwt);
        // upload(jwt);
        presignedURL().then((kuzzleResource) => {
            loadFileStream().then((fileBuffer) => {
                uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
                    console.log("doneMessage: ", doneMessage);
                }).then(() => {
                    kuzzleValidate(kuzzleResource).then(() => {
                        disConnectFromKuzzle();
                    });
                });
            });
        });
    });
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
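(Side note: the nested .then() pyramid at the end can be flattened with async/await; a sketch using the functions defined above, which also waits for the file check before starting, something the fileData = {} truthiness test does not do:

const run = async () => {
    await fsPromises.access(path, fs.constants.F_OK); // fail fast if the file is missing
    fileData = new fileObject.LocalFileData(path);
    const jwt = await connectToKuzzle();
    console.log(jwt);
    const kuzzleResource = await presignedURL(); // get the pre-signed upload URL
    const fileBuffer = await loadFileStream();
    const doneMessage = await uploadFile(fileBuffer, kuzzleResource, jwt); // PUT to the pre-signed URL
    console.log("doneMessage: ", doneMessage);
    await kuzzleValidate(kuzzleResource); // validate and persist the upload
    disConnectFromKuzzle();
};
run().catch(console.error);
)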
You can change the endpoint configuration to point at a different S3-compatible endpoint, which can be a Minio one. This is configured under the plugins.s3.endpoint key. You should also force path-style requests, since Minio does not support the virtual-hosted bucket URLs the AWS SDK uses by default.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);
With that in place, the pre-signed URL returned by the plugin should point at your Minio endpoint instead of the your-s3-bucket.s3.eu-west-3.amazonaws.com placeholder.
I'm trying to get the progress of a one-minute video uploading to a Firebase storage bucket using the Admin SDK. I've seen a lot about using firebase.storage().ref.child(...), but I'm unable to do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
    const BusBoy = require('busboy');
    const path = require('path');
    const os = require('os');
    const fs = require('fs');

    const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
    let mediaFileName;
    let mediaToBeUploaded = {};

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        if (mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
            console.log(mimetype);
            return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed' })
        }
        // my.image.png
        const imageExtension = filename.split('.')[filename.split('.').length - 1];
        // 43523451452345231234.png
        mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
        const filepath = path.join(os.tmpdir(), mediaFileName);
        mediaToBeUploaded = { filepath, mimetype };
        file.pipe(fs.createWriteStream(filepath));
        file.on('limit', function() {
            fs.unlink(filepath, function() {
                return res.json({ 'Error': 'Max file size is 200 Mb, file size too large' });
            });
        });
    });

    busboy.on('finish', () => {
        admin
            .storage()
            .bucket()
            .upload(mediaToBeUploaded.filepath, {
                resumable: false,
                metadata: {
                    metadata: {
                        contentType: mediaToBeUploaded.mimetype
                    }
                }
            })
            .then(() => {
                const meadiaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
                return res.json({ mediaUrl: meadiaUrl });
            })
            .catch((err) => {
                console.error(err);
                return res.json({ 'Error': 'Error uploading media' });
            });
    });

    req.pipe(busboy);
}
This works okay right now, but the problem is that the user can't see how far along their one- or two-minute video upload is. Currently there's just an activity indicator, and the user sits there waiting without any feedback. I'm using React Native on the frontend, if that helps with anything. Would appreciate any help!
I was able to implement this a lot more easily on the client side, and it works perfectly for both image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the Firebase SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`, snapshot => {
    this.uploadProgress = (snapshot.bytesTransferred / snapshot.totalBytes);
}, error => {
    this.error = error.message;
    this.submitting = false;
    this.uploadingMedia = false;
    return;
},
async () => {
    storageRef.snapshot.ref.getDownloadURL().then(async (url) => {
        imageUrl = [];
        videoUrl = [url];
        this.uploadingMedia = false;
        this.submitPost(imageUrl, videoUrl);
    });
});
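Below is a more reusable version of the same pattern: a promise-based wrapper around the client-side Firebase SDK that reports progress through a callback and resolves with either the download URL or the storage path.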
export const uploadFile = (
    folderPath,
    fileName,
    file,
    generateDownloadURL = true,
    updateInformationUploadProgress
) => {
    return new Promise((resolve, reject) => {
        try {
            const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
            const uploadTask = storageRef.put(file)
            uploadTask.on(
                'state_changed',
                snapshot => {
                    if (updateInformationUploadProgress) {
                        const progress =
                            (snapshot.bytesTransferred / snapshot.totalBytes) * 100
                        updateInformationUploadProgress({
                            name: fileName,
                            progress: progress,
                        })
                    }
                },
                error => {
                    console.log('upload error: ', error)
                    reject(error)
                },
                () => {
                    if (generateDownloadURL) {
                        uploadTask.snapshot.ref
                            .getDownloadURL()
                            .then(url => {
                                resolve(url)
                            })
                            .catch(error => {
                                console.log('url error: ', error.message)
                                reject(error)
                            })
                    } else {
                        resolve(uploadTask.snapshot.metadata.fullPath)
                    }
                }
            )
        } catch (error) {
            reject(error)
        }
    })
}
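Usage might look like this (a sketch; the folder name, file name, file object, and progress handler are placeholders):

uploadFile('videos', 'clip.mp4', file, true, ({ name, progress }) => {
    console.log(`${name}: ${progress.toFixed(0)}%`); // drive a progress bar from here
})
    .then(url => console.log('download URL: ', url))
    .catch(error => console.error(error));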
I've written a cloud function that resizes images after they're uploaded to Cloud Storage and then returns a signed URL for each resized image, but when I try to write that URL to Firestore afterwards I get a PERMISSION_DENIED: Missing or insufficient permissions error. Is there a way around this without changing the Firestore rules to allow anyone read/write access?
Below is the code:
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";
import * as fs from "fs-extra";
import { tmpdir } from "os";
import { join, dirname } from "path";
import * as sharp from "sharp";
admin.initializeApp({
    credential: admin.credential.applicationDefault()
});

const gcs = admin.storage();
const db = admin.firestore();

export const generateThumbs = functions.storage
    .object()
    .onFinalize(async object => {
        const bucket = gcs.bucket(object.bucket);
        const filePath = object.name as string;
        const fileName = filePath.split("/").pop();
        const bucketDir = dirname(filePath);

        const workingDir = join(tmpdir(), "thumbs");
        const tmpFilePath = join(workingDir, "source.png");

        if (fileName?.includes("#") || !object.contentType?.includes("image")) {
            console.log("exists, returning false...");
            return false;
        }

        // Ensure thumbnail dir exists
        await fs.ensureDir(workingDir);

        // Download source file
        await bucket.file(filePath).download({
            destination: tmpFilePath
        });

        // Resize the images and define an array of upload promises
        const sizes = [64, 128, 320, 640];

        const uploadPromises = sizes.map(async size => {
            const imageName = fileName?.split(".")[0];
            const imageExt = fileName?.split(".")[1];
            const thumbName = `${imageName}#${size}.${imageExt}`;
            const thumbPath = join(workingDir, thumbName);

            // Resize source images
            return sharp(tmpFilePath)
                .resize(size, size)
                .toFile(thumbPath)
                .then(outputInfo => {
                    // Upload to GCS
                    return bucket
                        .upload(thumbPath, {
                            destination: join(bucketDir, thumbName)
                        })
                        .then(async res => {
                            return res[0]
                                .getSignedUrl({
                                    action: "read",
                                    expires: "01-01-2040"
                                })
                                .then(signedUrlRes => {
                                    console.log(`url: ${signedUrlRes[0]}`);
                                    // const docRef = db
                                    //   .collection("/cities")
                                    //   .doc(imageName?.split("_")[0] as string);
                                    // return db.runTransaction(t => {
                                    //   t.set(docRef, {
                                    //     imageUrl: signedUrlRes[0]
                                    //   });
                                    //   return Promise.resolve();
                                    // });
                                    return db
                                        .collection("/cities")
                                        .doc(imageName?.split("_")[0] as string)
                                        .set({
                                            imageUrl: signedUrlRes[0]
                                        })
                                        .then(res => console.log("written"))
                                        .catch(e => {
                                            console.log("Firebase write error");
                                            console.log(e);
                                            throw Error(e);
                                        });
                                })
                                .catch(e => {
                                    console.log(e);
                                    throw Error(e);
                                });
                        });
                });
        });

        // Run upload operations
        await Promise.all(uploadPromises).catch(e => {
            console.log(e);
            throw Error(e.message);
        });

        return fs.remove(workingDir).catch(e => {
            console.log(e);
            throw Error(e.message);
        });
    });
Fixed it by changing admin.initializeApp() from
admin.initializeApp({
    credential: admin.credential.applicationDefault()
});
to
admin.initializeApp(functions.config().firebase);
Presumably the application-default credentials weren't resolving to the project's service account inside the function, so the Firestore writes were treated as unauthorized; initializing with the configuration Cloud Functions provides picks up the correct credentials.