upload image array to firebase react native - node.js

It uploads to Firebase Cloud Storage, but the download URLs are never returned to the array.
I want to return or set the download URLs in an array.
How can I solve this problem?
const uploadImage = () => {
  var promises = uploadUrl.map(async (image, index) => {
    let filename = image.substring(image.lastIndexOf('/') + 1);
    const task = storage().ref(`complaintPhotos/${filename}`).putFile(image);
    // promises.push(task);
    task.on('state_changed', taskSnapshot => {
      console.log(
        `${taskSnapshot.bytesTransferred} transferred out of ${taskSnapshot.totalBytes}`,
      );
    });
    try {
      await task;
      await storage()
        .ref(`complaintPhotos/${filename}`)
        .getDownloadURL()
        .then(url => {
          setUploadUri(prevState => [...prevState, url]);
        });
    } catch (e) {
      console.log(e);
    }
  });
};
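One way to get every download URL into the array is to collect the per-file promises with Promise.all and update the state once, after all uploads have finished. A minimal sketch, assuming the same storage() reference, uploadUrl array, and setUploadUri state setter from the question:

const uploadImages = async () => {
  // Map each local image path to a promise that resolves with its download URL
  const urls = await Promise.all(
    uploadUrl.map(async image => {
      const filename = image.substring(image.lastIndexOf('/') + 1);
      const ref = storage().ref(`complaintPhotos/${filename}`);
      await ref.putFile(image);      // wait for the upload task to complete
      return ref.getDownloadURL();   // then resolve the public download URL
    }),
  );
  // Set the state once, with all URLs, instead of one setState per file
  setUploadUri(prevState => [...prevState, ...urls]);
};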

Related

A friend of mine is trying to automate a process in which a bot posts Instagram stories as videos from a specific folder.

Below is working code that can post images, but is there any way to also share videos as an Instagram story?

The error I get when I try to post a video instead of an image is:
PS D:\Softwares\programming\Insta Bot\story> node index.js
18:45:11 - info: Dry Run Activated
18:45:11 - info: Post() called! ======================
18:45:11 - debug: 1 files found in ./images/
18:45:11 - warn: Record file not found, saying yes to D:\Softwares\programming\Insta Bot\story\images\meme.mp4
18:45:11 - debug: Read File Success
18:45:11 - error: undefined
(MAIN CODE)
index.js
const logger = require("./logger.js")
const { random, sleep } = require('./utils')
require('dotenv').config();
const { IgApiClient, IgLoginTwoFactorRequiredError } = require("instagram-private-api");
const ig = new IgApiClient();
const Bluebird = require('bluebird');
const inquirer = require('inquirer');
const { CronJob } = require('cron');
const path = require("path");
const fs = require("fs");
const fsp = fs.promises;
const sharp = require("sharp");
//==================================================================================
const statePath = "./etc/state.conf";
const recordPath = "./etc/usedfiles.jsonl";
const imgFolderPath = "./images/";
const dryrun = true;
const runOnStart = true;
//==================================================================================
(async () => { // FOR AWAIT
  // LOGIN TO INSTAGRAM
  if (!dryrun) {
    await login();
    logger.info("Log In Successful");
  } else {
    logger.info("Dry Run Activated");
  }
  // SCHEDULER
  // logger.silly("I'm a schedule, and I'm running!! :)");
  const job = new CronJob('38 43 * * * *', post, null, true); // https://crontab.guru/
  if (!runOnStart) logger.info(`Next few posts scheduled for: \n${job.nextDates(3).join("\n")}\n`);
  else post();

  // MAIN POST COMMAND
  async function post() {
    logger.info("Post() called! ======================");
    let postPromise = fsp.readdir(imgFolderPath)
      .then(filenames => {
        if (filenames.length < 1) throw new Error(`Folder ${imgFolderPath} is empty...`)
        logger.debug(`${filenames.length} files found in ${imgFolderPath}`);
        return filenames;
      })
      .then(filenames => filenames.map(file => path.resolve(imgFolderPath + file)))
      .then(filenames => pickUnusedFileFrom(filenames, filenames.length))
      .then(filename => {
        if (!dryrun) registerFileUsed(filename)
        return filename
      })
      .then(fsp.readFile)
      .then(async buffer => {
        logger.debug("Read File Success "); //TODO move this to previous then?
        return sharp(buffer).jpeg().toBuffer()
          .then(file => {
            logger.debug("Sharp JPEG Success");
            return file
          })
      })
      .then(async file => {
        if (!dryrun) {
          // await sleep(random(1000, 60000)) //TODO is this necessary?
          return ig.publish.story({ file })
            .then(fb => logger.info("Posting successful!?"))
        }
        else return logger.info("Data not sent, dryrun = true")
      })
      .then(() => logger.info(`Next post scheduled for ${job.nextDates()}\n`))
      .catch(logger.error)
  }
})();
//=================================================================================
async function login() {
  ig.state.generateDevice(process.env.IG_USERNAME);
  // ig.state.proxyUrl = process.env.IG_PROXY;
  // register callback?
  ig.request.end$.subscribe(async () => {
    const serialized = await ig.state.serialize();
    delete serialized.constants; // this deletes the version info, so you'll always use the version provided by the library
    await stateSave(serialized);
  });
  if (await stateExists()) {
    // import state accepts both a string as well as an object
    // the string should be a JSON object
    const stateObj = await stateLoad();
    await ig.state.deserialize(stateObj)
      .catch(err => logger.debug("deserialize: " + err));
  } else {
    let standardLogin = async function() {
      // login like normal
      await ig.simulate.preLoginFlow();
      logger.debug("preLoginFlow finished");
      await ig.account.login(process.env.IG_USERNAME, process.env.IG_PASSWORD);
      logger.info("Logged in as " + process.env.IG_USERNAME);
      process.nextTick(async () => await ig.simulate.postLoginFlow());
      logger.debug("postLoginFlow finished");
    }
    // Perform usual login
    // If 2FA is enabled, IgLoginTwoFactorRequiredError will be thrown
    return Bluebird.try(standardLogin)
      .catch(
        IgLoginTwoFactorRequiredError,
        async err => {
          logger.info("Two Factor Auth Required");
          const { username, totp_two_factor_on, two_factor_identifier } = err.response.body.two_factor_info;
          // decide which method to use
          const verificationMethod = totp_two_factor_on ? '0' : '1'; // default to 1 for SMS
          // At this point a code should have been sent
          // Get the code
          const { code } = await inquirer.prompt([
            {
              type: 'input',
              name: 'code',
              message: `Enter code received via ${verificationMethod === '1' ? 'SMS' : 'TOTP'}`,
            },
          ]);
          // Use the code to finish the login process
          return ig.account.twoFactorLogin({
            username,
            verificationCode: code,
            twoFactorIdentifier: two_factor_identifier,
            verificationMethod, // '1' = SMS (default), '0' = TOTP (google auth for example)
            trustThisDevice: '1', // Can be omitted as '1' is used by default
          });
        },
      )
      .catch(e => logger.error('An error occurred while processing two factor auth', e, e.stack));
  }
  return

  //================================================================================
  async function stateSave(data) {
    // here you would save it to a file/database etc.
    await fsp.mkdir(path.dirname(statePath), { recursive: true }).catch(logger.error);
    return fsp.writeFile(statePath, JSON.stringify(data))
      // .then(() => logger.info('state saved, daddy-o'))
      .catch(err => logger.error("Write error" + err));
  }

  async function stateExists() {
    return fsp.access(statePath, fs.constants.F_OK)
      .then(() => {
        logger.debug('Can access state info')
        return true
      })
      .catch(() => {
        logger.warn('Cannot access state info')
        return false
      });
  }

  async function stateLoad() {
    // here you would load the data
    return fsp.readFile(statePath, 'utf-8')
      .then(data => JSON.parse(data))
      .then(data => {
        logger.info("State load successful");
        return data
      })
      .catch(logger.error)
  }
}
async function registerFileUsed(filepath) {
  let data = JSON.stringify({
    path: filepath,
    time: new Date().toISOString()
  }) + '\n';
  return fsp.appendFile(recordPath, data, { encoding: 'utf8', flag: 'a+' })
    .then(() => {
      logger.debug("Writing filename to record file");
      return filepath
    })
}

function pickUnusedFileFrom(filenames, iMax = 1000) {
  return new Promise((resolve, reject) => {
    let checkFileUsed = async function (filepath) {
      return fsp.readFile(recordPath, 'utf8')
        .then(data => data.split('\n'))
        .then(arr => arr.filter(Boolean))
        .then(arr => arr.map(JSON.parse))
        .then(arr => arr.some(entry => entry.path === filepath))
    }
    let trythis = function (iMax, i = 1) {
      let file = random(filenames);
      checkFileUsed(file)
        .then(async used => {
          if (!used) {
            logger.info(`Unused file found! ${file}`);
            resolve(file);
          } else if (i < iMax) {
            logger.debug(`Try #${i}: File ${file} used already`);
            await sleep(50);
            trythis(iMax, ++i)
          } else {
            reject(`I tried ${iMax} times and all the files I tried were previously used`)
          }
        })
        .catch(err => {
          logger.warn("Record file not found, saying yes to " + file);
          resolve(file);
        })
    }(iMax);
  })
}
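The error: undefined in the log most likely comes from the pipeline above unconditionally pushing every file through sharp, which decodes images but not .mp4 video. For video stories, instagram-private-api's publish.story accepts a video buffer plus a coverImage buffer instead of file. A rough sketch of a branch for videos, assuming an authenticated ig client and a pre-rendered JPEG cover frame at ./images/cover.jpg (a hypothetical path):

const fs = require("fs").promises;
const path = require("path");
const sharp = require("sharp");

// Hypothetical helper: post one file as a story, branching on its extension.
// Assumes `ig` is an authenticated IgApiClient; Instagram requires a
// coverImage (thumbnail) buffer alongside the video buffer.
async function postStory(ig, filepath) {
  const ext = path.extname(filepath).toLowerCase();
  if (ext === ".mp4") {
    return ig.publish.story({
      video: await fs.readFile(filepath),                  // raw mp4 buffer
      coverImage: await fs.readFile("./images/cover.jpg"), // hypothetical cover frame
    });
  }
  // Images keep going through the existing sharp -> JPEG path
  const file = await sharp(await fs.readFile(filepath)).jpeg().toBuffer();
  return ig.publish.story({ file });
}

The cover frame could also be extracted from the video itself (for example with ffmpeg) before posting, instead of using a fixed file.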

Kuzzle / Minio example usage

Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
  Kuzzle,
  WebSocket
} = require('kuzzle-sdk');
var start = new Date();
const webSocketOptionsObject = {
  "autoReconnect": true,
  "ssl": true,
  "port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows path
var fileData = {};
// check file exists
fs.access(path, fs.F_OK, (err) => {
  if (err) {
    console.error(err)
    return
  }
  fileData = new fileObject.LocalFileData(path);
  // Adds a listener to detect connection problems
  kuzzle.on('networkError', error => {
    console.error('Network Error:', error);
  });
});
const connectToKuzzle = async () => {
  // Connects to the Kuzzle server
  await kuzzle.connect();
  return await kuzzle.auth.login('local', credentials);
  // console.log('jwt auth token: ', jwt);
}

const disConnectFromKuzzle = async () => {
  console.log('Disconnected from Kuzzle');
  kuzzle.disconnect();
  var time = new Date() - start;
  // sec = Math.floor((time/1000) % 60);
  console.log('Execution time in milliseconds: ', time);
}
const presignedURL = async () => {
  // Get a Presigned URL
  const result = await kuzzle.query({
    controller: 's3/upload',
    action: 'getUrl',
    uploadDir: 'proxybucket', // directory name inside the Bucket specified in the s3 plugin bucket name
    filename: fileData.name
  });
  console.log("result: ", result);
  return result;
}
const loadFileStream = async () => {
  console.log('getting file: ', path);
  let targetFile = null;
  await fs.promises.readFile(path)
    .then(function (result) {
      console.log("file loaded------", result.length);
      targetFile = result;
    })
    .catch(function (error) {
      console.log(error);
      return;
    });
  return targetFile;
}
const kuzzleValidate = async (kuzzleResource) => {
  // console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
  // validate
  // Validate and persist a previously uploaded file.
  // https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
  const Presult = await kuzzle.query({
    // Kuzzle API params
    "controller": "s3/upload",
    "action": "validate",
    // File key in S3 bucket
    "fileKey": kuzzleResource.result.fileKey
  });
  console.log('validate: ', Presult.result.fileUrl);
}
const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
  // options at https://github.com/axios/axios
  const axiosOptions = {
    headers: {
      'Content-Type': fileData.type
    },
    maxBodyLength: 200000000 // 200,000,000 bytes = 200 MB
  };
  // PUT the fileBuffer to the Kuzzle S3 endpoint
  // https://github.com/axios/axios
  axios.defaults.headers.common['Authorization'] = jwt;
  const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
    .then((response) => {
      console.log('file uploaded......');
    })
    .catch(function (error) {
      console.log("File upload error: ", error);
      return;
    });
  return "Upload successful";
}
if (fileData) {
  connectToKuzzle().then((jwt) => {
    console.log(jwt);
    // upload(jwt);
    presignedURL().then((kuzzleResource) => {
      loadFileStream().then((fileBuffer) => {
        uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
          console.log("doneMessage: ", doneMessage);
        }).then(() => {
          kuzzleValidate(kuzzleResource).then(() => {
            disConnectFromKuzzle();
          });
        });
      });
    });
  });
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to point at a different S3-compatible endpoint, which can be a Minio one.
This configuration can be changed under the plugins.s3.endpoint key. You should also enable S3 path-style access, since Minio does not serve virtual-hosted-style bucket URLs by default.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);
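To sanity-check that requests really go to Minio rather than the default AWS endpoint, the same two options can be exercised directly with the aws-sdk v2 S3 client. A minimal sketch, with hypothetical credentials and the proxybucket name from the question:

const AWS = require('aws-sdk');

// Hypothetical credentials for a local Minio instance at https://minio.local
const s3 = new AWS.S3({
  endpoint: 'https://minio.local',
  accessKeyId: 'minio-access-key',
  secretAccessKey: 'minio-secret-key',
  s3ForcePathStyle: true, // required for Minio: bucket goes in the path, not the hostname
  signatureVersion: 'v4',
});

// A presigned PUT URL should now start with https://minio.local/proxybucket/...
const url = s3.getSignedUrl('putObject', {
  Bucket: 'proxybucket',
  Key: 'yellow_taxi_data.csv',
  Expires: 600,
});
console.log(url);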

node javascript file upload doesn't work on remote server

On my local dev machine accessing localhost, the following code works beautifully, even with network settings throttled to "Slow 3G." However, when running on my VPS, it fails to process the file on the server. Here are two different code blocks I tried (again, both work without issue on the local dev machine accessing localhost):
profilePicUpload: async (parent, args) => {
  const file = await args.file;
  const fileName = `user-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  file
    .createReadStream()
    .pipe(createWriteStream(tmpFilePath))
    .on('finish', () => {
      jimp
        .read(`tmp/${fileName}`)
        .then(image => {
          image.cover(300, 300).quality(60);
          image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
        })
        .catch(error => {
          throw new Error(error);
        });
    });
}
It seems like this code block doesn't wait long enough for the file upload to finish, since when I check the storage location on the VPS I only see a partial file.
I also tried the following with no luck:
profilePicUpload: async (parent, args) => {
  const { createReadStream } = await args.file;
  let data = '';
  const fileStream = await createReadStream();
  fileStream.setEncoding('binary');
  // UPDATE: 11-2
  let i = 0;
  fileStream.on('data', chunk => {
    console.log(i);
    i++;
    data += chunk;
  });
  fileStream.on('error', err => {
    console.log(err);
  });
  // END UPDATE
  fileStream.on('end', () => {
    const file = Buffer.from(data, 'binary');
    jimp
      .read(file)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .catch(error => {
        throw new Error(error);
      });
  });
}
With this code, I don't even get a partial file.
jimp is a JS library for image manipulation.
If anyone has any hints to get this working properly, I'd appreciate it very much. Please let me know if I'm missing some info.
I was able to figure out a solution by referring to this article: https://nodesource.com/blog/understanding-streams-in-nodejs/
Here is my final, working code:
const { createWriteStream, unlink } = require('fs');
const path = require('path');
const { once } = require('events');
const { promisify } = require('util');
const stream = require('stream');
const jimp = require('jimp');

profilePicUpload: async (parent, args) => {
  // have to wait while file is uploaded
  const { createReadStream } = await args.file;
  const fileStream = createReadStream();
  const fileName = `user-${args.uid}-${nanoid(3)}.jpg`;
  const tmpFilePath = path.join(__dirname, `../../tmp/${fileName}`);
  const tmpFileStream = createWriteStream(tmpFilePath, {
    encoding: 'binary'
  });
  const finished = promisify(stream.finished);
  fileStream.setEncoding('binary');
  // apparently async iterators are the way to go
  for await (const chunk of fileStream) {
    if (!tmpFileStream.write(chunk)) {
      await once(tmpFileStream, 'drain');
    }
  }
  tmpFileStream.end(() => {
    jimp
      .read(`tmp/${fileName}`)
      .then(image => {
        image.cover(300, 300).quality(60);
        image.writeAsync(`static/uploads/users/${fileName}`, jimp.AUTO);
      })
      .then(() => {
        unlink(tmpFilePath, error => {
          console.log(error);
        });
      })
      .catch(error => {
        console.log(error);
      });
  });
  await finished(tmpFileStream);
}
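For what it's worth, on Node 15+ the manual write/drain loop can be replaced by stream.pipeline from stream/promises, which handles backpressure and only resolves once the write stream has fully finished. A condensed sketch of the same idea, assuming the same args.file upload object:

const { pipeline } = require('stream/promises');
const { createWriteStream } = require('fs');

// Sketch: persist the upload to disk, letting pipeline manage backpressure
// and waiting until the write stream has finished.
async function saveUpload(file, tmpFilePath) {
  const { createReadStream } = await file;
  await pipeline(createReadStream(), createWriteStream(tmpFilePath));
  return tmpFilePath; // safe to hand to jimp.read() at this point
}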

PDF download fails using PDFMake in NodeJS

I added an endpoint to my NodeJS API to generate and download a PDF using the PDFMake library. I'm able to generate and save the PDF on the server side, but on the browser side the download completes with a 0-KByte file, and I'm stuck trying to find a solution.
I understand that the file is downloaded before the write stream finishes writing it, but adding an extra handler such as:
pdf.on("finish", async () => {
res.download(pdf);
res.send("PDF generated");
});
This didn't help, but it did add an extra error:
UnhandledPromiseRejectionWarning: RangeError [ERR_HTTP_INVALID_STATUS_CODE]: Invalid status code: undefined
I have no idea how to solve this and hope for help. Most probably it's something I'm missing.
The code responsible for this functionality is below.
Router
router.get("/pdf/all", async (req, res) => {
const movies = await movie.getMovies();
try {
const pdf = await generatePdf(movies, "all");
pdf.on("finish", async () => {
res.download(pdf);
res.send("PDF generated");
});
} catch (err) {
// Errors
res.status(err.status).json({ message: err.message });
throw new Error(error.message);
}
});
generatePdf
const generatePdf = (movies, name) => {
  return new Promise((resolve, reject) => {
    try {
      let fonts = {
        Roboto: {
          normal: "Helvetica",
          bold: "Helvetica-Bold",
          italics: "Helvetica-Oblique",
          bolditalics: "Helvetica-BoldOblique"
        }
      };
      let printer = new pdfMaker(fonts);
      const pdfTemplate = template(movies);
      const pdfStream = printer.createPdfKitDocument(pdfTemplate, {});
      const filePath = path.join(
        __dirname,
        uploads + "/" + pdfDir + `${name}.pdf`
      );
      console.log(filePath);
      pdfStream.pipe(writeStream(filePath));
      pdfStream.end();
      resolve(filePath);
    } catch (err) {
      console.log(err);
      reject(err);
    }
  });
};
I would suggest you try the following code for generatePdf:
const generatePdf = (movies, name) => {
  return new Promise((resolve, reject) => {
    try {
      let fonts = {
        Roboto: {
          normal: "Helvetica",
          bold: "Helvetica-Bold",
          italics: "Helvetica-Oblique",
          bolditalics: "Helvetica-BoldOblique"
        }
      };
      let printer = new pdfMaker(fonts);
      const pdfTemplate = template(movies);
      const pdfStream = printer.createPdfKitDocument(pdfTemplate, {});
      const filePath = path.join(
        __dirname,
        uploads + "/" + pdfDir + `${name}.pdf`
      );
      console.log(filePath);
      let stream = pdfStream.pipe(writeStream(filePath));
      pdfStream.end();
      stream.on('finish', function () {
        resolve(filePath);
      });
    } catch (err) {
      console.log(err);
      reject(err);
    }
  });
};
The code now waits for the write stream created by pdfStream.pipe to emit 'finish' before resolving the filePath. Note that pdfStream.end() has to be called outside the 'finish' handler: the write stream only finishes after the PDF document itself has been ended.
Also, you cannot use res.send after res.download: res.download sets the response to the file, so you can no longer send another response to the client.
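On the router side, generatePdf resolves with a plain file path, not a stream, so there is nothing to attach a "finish" listener to; awaiting it and handing the path to res.download (which streams the file and ends the response itself) should be enough. A sketch under those assumptions:

router.get("/pdf/all", async (req, res) => {
  try {
    const movies = await movie.getMovies();
    // resolves once the PDF has been fully written to disk
    const filePath = await generatePdf(movies, "all");
    // res.download streams the file and ends the response; no res.send afterwards
    res.download(filePath, "all.pdf", err => {
      if (err && !res.headersSent) res.status(500).json({ message: err.message });
    });
  } catch (err) {
    res.status(err.status || 500).json({ message: err.message });
  }
});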

Puppeteer wrong result with evaluate() & exposeFunction()

I ran the following and it appears to gather a large number of links. However, on actual inspection of the site, collectLinks1 returns all valid links, but collectLinks2 returns 59 copies of http://pieroxy.net/blog/2014/11/18/[
I'm new to Puppeteer and I can't figure out why collectLinks2 doesn't collect the links.
const { parse, resolve } = require('url');
const trim = require('lodash/trim');
const startsWith = require('lodash/startsWith');
const includes = require('lodash/includes');
// https://github.com/GoogleChrome/puppeteer
const puppeteer = require('puppeteer');
// https://github.com/gwuhaolin/chrome-finder
const findChrome = require('chrome-finder');
function resolveUrl(url, baseUrl) {
  url = trim(url);
  if (!url) return null;
  if (startsWith(url, '#')) return null;
  const { protocol } = parse(url);
  if (includes(['http:', 'https:'], protocol)) {
    return url.split('#')[0];
  }
  if (!protocol) {
    return resolve(baseUrl, url).split('#')[0];
  }
  return null;
}
async function collectLinks1(htmlPage) {
  const baseUrl = htmlPage.url();
  const links = [];
  const assetUrls = await htmlPage.$$eval('a[href]', assetLinks => assetLinks.map(link => link.href));
  assetUrls.forEach(link => {
    const _link = resolveUrl(link, baseUrl);
    if (_link) links.push(_link);
  });
  return links;
}
async function collectLinks2(htmlPage) {
  const baseUrl = htmlPage.url();
  const links = [];
  await htmlPage.exposeFunction('pushToLinks', link => {
    const _link = resolveUrl(link, baseUrl);
    if (_link) links.push(_link);
  });
  await htmlPage.evaluate(() => {
    function findLinks(document) {
      document.querySelectorAll('a[href]')
        .forEach(link => {
          window.pushToLinks(link.href);
        });
    }
    findLinks(window.document);
  });
  return links;
}
const crawl = async url => {
  try {
    console.log(`Crawling ${url}`);
    const browser = await puppeteer.launch({
      headless: false,
      executablePath: findChrome(),
    });
    const page = await browser.newPage();
    await page.goto(url);
    // OK
    const links1 = await collectLinks1(page);
    links1.forEach(link => { console.log(link); });
    // KO
    const links2 = await collectLinks2(page);
    links2.forEach(link => { console.log(link); });
    await browser.close();
  } catch (err) {
    console.log(err);
  }
};

crawl('http://pieroxy.net/blog/2014/11/18/user_agent_detection_in_java.html');
You need to await the function defined via page.exposeFunction, as it returns a Promise. Since you are only calling the function but not awaiting its result, your page.evaluate call resolves before your script has finished executing.
Solution
Instead of the forEach, you should use a loop to iterate over all the items and communicate them to the page one after another.
async function collectLinks2(htmlPage) {
  // ...
  await htmlPage.evaluate(async () => {
    async function findLinks(document) {
      for (const link of document.querySelectorAll('a[href]')) {
        await window.pushToLinks(link.href);
      }
    }
    await findLinks(window.document);
  });
  return links;
}
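Note that this serializes one browser-to-Node round trip per link. If the exposed callback only collects hrefs, returning the array directly from evaluate (as collectLinks1 already does via $$eval) avoids the exposeFunction bridge, and its per-call overhead, entirely.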
