The code below has a flaw: I am getting an array of undefined values.
let filters = [];

async function getFilters(tiers) {
  return await Promise.all(
    tiers.map(async t => {
      let id = new ObjectId(t.filter);
      filters.push(
        await conn.collection('TierScheduleFilter').find({
          _id: id
        }).toArray(function(err, filter) {
          if (err || !filter) {
            reject('no filter || error');
          }
          return filter;
        })
      );
    })
  );
}
await getFilters(tiers);
console.log(filters); // 4 filters => [ undefined, undefined, undefined, undefined ]
The code should retrieve all the filters, but they all come back as undefined values.
This one seems to be the proper approach: return the document from the map callback and let Promise.all collect the results. The original code pushes the return value of toArray() called with a callback, which is undefined, and reject is not defined in that scope.

async function getFilters(tiers) {
  return await Promise.all(
    tiers.map(async t => {
      let id = new ObjectId(t.filter);
      try {
        return await conn.collection('TierScheduleFilter').findOne({ _id: id });
      } catch (e) {
        return e;
      }
    })
  );
}

const filters = await getFilters(tiers); // read the returned array instead of a shared outer variable
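For reference, the original find().toArray() call also works once the callback is dropped, because the driver then returns a promise instead of undefined. A minimal sketch, assuming the same conn, ObjectId, and collection as above:

async function getFilters(tiers) {
  return Promise.all(
    tiers.map(async t => {
      const id = new ObjectId(t.filter);
      // With no callback, toArray() returns a promise that resolves to an array of matches.
      const docs = await conn.collection('TierScheduleFilter').find({ _id: id }).toArray();
      return docs[0] || null; // at most one document can match an _id
    })
  );
}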
So my issue is simple yet challenging. I am trying to transfer data from one ledger to another. For that, I read a whole table of data, create an object for every document, and then insert those documents one by one into the other ledger's table:
Ledger#1 Table1 -> get all data -> convert all data to array of objects -> transfer to Ledger#2 Table1 one by one
The problem is creating the object from the document. I do this manually, using prototype functions to read each field's type and build the value, which is messy and causes some data to become null. I was wondering if there is a better way that is less error-prone.
I asked a question about migrating a ledger but had no luck getting any response. Please help me with this.
Following is my code; please copy and paste it into an IDE so you can better understand it:
// Maps a QLDB Ion field to a plain JS value. Only a few Ion types are handled;
// anything else falls through to null, which would explain why some data ends up null.
const getValueOfField = (field) => {
  const name = field.getType().name;
  switch (name) {
    case "string":
      return field.stringValue();
    case "int":
      return field.numberValue();
    case "null":
      return null;
    default:
      return null;
  }
};
const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      const resultList = result.getResultList();
      let completeResults = [];
      for (const doc of resultList) {
        let newDoc = {};
        const fields = doc.fields();
        for (const field of fields) {
          newDoc[field[0]] = getValueOfField(field[1]);
        }
        completeResults.push(newDoc);
      }
      return completeResults;
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
So apparently I could have done this easily. I kept experimenting and figured out the solution:
I can insert the whole fetched document as-is, and it will be the same, so my converted code is as follows.
const { awsMainFunction: awsProd } = require("./awsProdConfig");
const { awsMainFunction: awsProduction } = require("./awsProductionConfig");
const { tableNamesAndIndeces: tableName, checkForErrors } = require("./utils");

const enterDataInNewLedger = async (tableData, tableName) => {
  const awsProductionDriver = awsProduction();
  console.log(`Starting to insert data inside table ${tableName}`);
  try {
    for (const data of tableData) {
      await awsProductionDriver.executeLambda(async (txn) => {
        await txn.execute(`INSERT INTO ${tableName} ?`, data);
      });
    }
    console.log(`Done inserting data inside ${tableName}`);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const dataTransferOfTable = async (table) => {
  const prodDriver = awsProd();
  try {
    const allTableData = await prodDriver.executeLambda(async (txn) => {
      const result = await txn.execute(`SELECT * FROM ${table.name}`);
      return result.getResultList();
    });
    const response = await enterDataInNewLedger(allTableData, table.name);
    checkForErrors(response);
    return { success: true };
  } catch (err) {
    console.log(err.message);
    return { success: false, message: err.message };
  }
};

const startDataTransferFromOneLedgerToAnother = async () => {
  try {
    for (let table of tableName) {
      const response = await dataTransferOfTable(table);
      checkForErrors(response);
    }
  } catch (err) {
    console.log(err.message);
  }
};

startDataTransferFromOneLedgerToAnother();
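If one transaction per document ever becomes a bottleneck, the inserts could also be grouped so that each executeLambda call (one QLDB transaction) writes a chunk of documents. This is only a sketch, not part of the original solution: enterDataInNewLedgerBatched is a hypothetical helper, it assumes the same awsProduction() driver as above, and the chunk size has to stay within QLDB's per-transaction document limit.

// Hedged sketch: chunked inserts, one transaction per chunk instead of per document.
const enterDataInNewLedgerBatched = async (tableData, tableName, chunkSize = 20) => {
  const driver = awsProduction();
  for (let i = 0; i < tableData.length; i += chunkSize) {
    const chunk = tableData.slice(i, i + chunkSize);
    await driver.executeLambda(async (txn) => {
      // One INSERT per document, but all inside the same transaction.
      for (const doc of chunk) {
        await txn.execute(`INSERT INTO ${tableName} ?`, doc);
      }
    });
  }
};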
I need help with the code below. I get an array of items from the client, and the goal is to save them in MongoDB and return the list classified into 'saved' and 'failed' items. An example of a failed item is one that duplicates a unique attribute.
I know the code below will not work because of variable scope. How do I get around it? It currently returns an empty array for both savedItems and failedItems. Thanks!
router.post('/addItems', async (req, res, next) => {
  let items = req.body;
  let result = {
    savedItems: [],
    failedItems: []
  };

  function saveData() {
    for (i = 0; i < items.length; i++) {
      item = items[i];
      Model.create({ ...item }, (err, data) => {
        if (err) {
          result.failedItems.push(item);
        } else {
          result.savedItems.push(item);
        }
      });
    }
    return result;
  }

  saveData().then(result => {
    res.send({
      results: result
    });
  });
});
router.post('/addItems', async (req, res, next) => {
  // use try/catch when using async/await
  try {
    let items = req.body;
    let result = {
      savedItems: [],
      failedItems: []
    };
    for (let i = 0; i < items.length; i++) {
      const item = items[i];
      // use the returned promise instead of the callback for Model.create
      const data = await Model.create({ ...item });
      result.savedItems.push(item);
      // if you also need to collect failed items in result, use another try/catch inside:
      /*
      try {
        const data = await Model.create({ ...item });
        result.savedItems.push(item);
      } catch (err) {
        result.failedItems.push(item);
      }
      */
    }
    res.send({
      results: result
    });
  } catch (err) {
    // catches all unexpected errors and rejected promises
    res.send({
      error: err
    });
  }
});
Your saveData method doesn't return a promise; try this:
function saveData() {
  return new Promise(resolve => {
    let items = req.body;
    let result = {
      savedItems: [],
      failedItems: []
    };
    let promises = [];
    for (let i = 0; i < items.length; i++) {
      const item = items[i]; // block-scoped, so each callback sees its own item
      let promise = new Promise(resolve => {
        Model.create({ ...item }, (err, data) => {
          if (err) {
            result.failedItems.push(item);
          } else {
            result.savedItems.push(item);
          }
          resolve();
        });
      });
      promises.push(promise);
    }
    Promise.all(promises).then(() => resolve(result));
  });
}
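For reference, a more concise variant of the same idea: Promise.allSettled collects both outcomes without wrapping each create call in a new Promise. This is only a sketch, assuming Node 12.9+ (for allSettled), a promise-returning Model.create (e.g. Mongoose), and that items is passed in as an argument:

async function saveData(items) {
  // One settled outcome per item, in the same order as the input array.
  const outcomes = await Promise.allSettled(
    items.map(item => Model.create({ ...item }))
  );
  return {
    savedItems: items.filter((_, i) => outcomes[i].status === 'fulfilled'),
    failedItems: items.filter((_, i) => outcomes[i].status === 'rejected')
  };
}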
In the code below I am trying to upsert two user records. If either record fails to upsert, I would like both records to be rolled back.
I force the second record to fail by setting user_id = null. However, it still hits the then block (return result;). It does not catch/throw an error or roll back the transaction. I also see this error in my logs:
Unhandled rejection SequelizeValidationError: notNull Violation: User.user_id cannot be null
at Promise.all.then
async function submitUsers(users) {
  return db.sequelize.transaction(async (tx) => {
    const queries = users.map((user, index) => {
      if (index == 1) {
        user.user_id = null;
      }
      User.upsert(user, tx);
    });
    await Promise.all(queries);
  }).then((result) => {
    return result;
  }).catch((e) => {
    throw e;
  });
}

const users = [ {user_id: 1}, {user_id: 2}];
await submitUsers(users);
You are not returning your promises correctly. Try this:
async function submitUsers(users) {
  return db.sequelize.transaction((tx) => {
    const queries = users.map((user, index) => {
      if (index == 1) {
        user.user_id = null;
      }
      // return the promise, and pass the transaction in the options object
      return User.upsert(user, { transaction: tx });
    });
    return Promise.all(queries);
  }).then((result) => {
    return result;
  }).catch((e) => {
    throw e;
  });
}
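For comparison, a minimal async/await sketch of the same fix, assuming the managed-transaction API used above (Sequelize rolls the transaction back automatically when the callback's promise rejects):

async function submitUsers(users) {
  return db.sequelize.transaction(async (tx) => {
    // If any upsert rejects, this promise rejects and the whole transaction is rolled back.
    await Promise.all(
      users.map(user => User.upsert(user, { transaction: tx }))
    );
  });
}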
This code either runs once or a maximum of 100 times. I have a dummy data file with 6,000 records, as this is the average it will have to handle.
I am currently using the Blaze plan.
The code was working somewhat; then I set up a new project and now I get this issue.
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    let patronPromise: any[];
    patronPromise = [];
    if (object.name === 'patrons/upload.csv') {
      admin
        .storage()
        .bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin
                  .firestore()
                  .collection('patrons')
                  .add({ ...x })
                  .then(doc => {
                    console.log(doc);
                  })
                  .catch(err => {
                    console.log(err);
                  });
                patronPromise.push(newPromise);
              });
            }
          });
        })
        .catch(err => {
          console.log(err);
        });
    }
    return Promise.all(patronPromise)
      .catch(err => {
        console.log(err);
      });
  });
All it has to do is read the file from storage, parse it, and add each record to the Firestore collection.
Function returned undefined, expected Promise or value
This is the error I get in the logs
Your first promise may be shut down before it finishes, because the function returns without waiting for it. Try to follow the promise/always-return rule:
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    if (object.name === 'patrons/upload.csv') {
      return admin.storage().bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          let patronPromise: any[];
          patronPromise = [];
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin.firestore()
                  .collection('patrons')
                  .add({
                    ...x
                  });
                patronPromise.push(newPromise);
              });
            }
          });
          return Promise.all(patronPromise);
        })
        .then(result => {
          // return Promise.resolve() or some other value here
        })
        .catch(err => {
          console.log(err);
        });
    } else {
      // also return something when there is nothing to do
      return null;
    }
  });
You're ignoring the promise that admin.storage().bucket().file('/patrons/upload.csv').download({}) returns, which means that the function may get aborted.
I think it should be closer to this:
export const uploadPatrons = functions.storage
  .object()
  .onFinalize((object, context) => {
    let patronPromise: any[];
    patronPromise = [];
    if (object.name === 'patrons/upload.csv') {
      return admin.storage().bucket()
        .file('/patrons/upload.csv')
        .download({})
        .then(data => {
          Papa.parse(data.toString(), {
            header: true,
            skipEmptyLines: true,
            complete: result => {
              result.data.forEach(x => {
                x.inside = false;
                x.arrived = false;
                x.img = false;
                x.arrivedTime = null;
                const newPromise = admin.firestore()
                  .collection('patrons')
                  .add({
                    ...x
                  });
                patronPromise.push(newPromise);
              });
              // TODO: return the Promise.all(patronPromise) here
            }
          });
        });
    }
  });
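One way to fill in the TODO above is to wrap the parse step in a promise, so the combined Firestore writes can be returned to Cloud Functions. A minimal sketch, assuming the same Papa and admin imports as above; parsePatronsCsv is a hypothetical helper:

function parsePatronsCsv(csv) {
  return new Promise(resolve => {
    Papa.parse(csv, {
      header: true,
      skipEmptyLines: true,
      complete: result => {
        // Start one Firestore write per parsed row and resolve with the combined
        // promise, so the caller can wait for every write to finish.
        const writes = result.data.map(x =>
          admin.firestore().collection('patrons').add({
            ...x,
            inside: false,
            arrived: false,
            img: false,
            arrivedTime: null
          })
        );
        resolve(Promise.all(writes));
      }
    });
  });
}

// Inside onFinalize, the handler could then be:
// return admin.storage().bucket().file('/patrons/upload.csv').download({})
//   .then(data => parsePatronsCsv(data.toString()));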
I'm trying to write a chain of Promises, but the last .then() is being called multiple times and I don't know why. The last .then() must run a single time, because it will call another API, passing result as the body.
I know it is being called multiple times because I'm logging with console.log().
What is wrong with my code? As I understand it, .then() should wait for the promise to return something.
app.post('/router/join', function(req, res){
  let data = req.body;
  sessions.validate(data)
    .then(result => {
      return {
        authenticated: (result.code === 201)
      };
    })
    .then(result => {
      if (result.authenticated) {
        return contacts.getContacts(data.tenant_id).then(cs => {
          let json = merge(result, cs.data);
          return Promise.all(cs.data.items.map(contact => {
            return messages.getLastMessage(data.tenant_id, contact.item.contact_id, data.hash_id)
              .then(result => {
                contact.item.last_message = result.code === 200 && result.data.length > 0 ? result.data[0] : null;
                return contact;
              });
          })).then(result => {
            json.items = result;
            return json;
          });
        });
      } else {
        return result;
      }
    })
    .then(result => {
      // this call should run after all other promises and only a single time
      let event = result.authenticated ? 'valid_session' : 'invalid_session';
      console.log('222');
      proxy.send(event, result);
    })
    .catch(err => {
      console.log('333');
      proxy.send('invalid_session', {socket_id: data.socket_id});
    });
  res.status(201).send({});
});
You can use async/await to clean it up. Inside async functions you can await the results of promises.
app.post('/router/join', async function (req, res, next) {
  let data = req.body; // declared outside the try so the catch block can still use it
  try {
    let {code} = await sessions.validate(data);
    let result = { authenticated: (code === 201) };
    if (result.authenticated) {
      let cs = await contacts.getContacts(data.tenant_id);
      let json = merge(result, cs.data);
      // await the combined promise so json.items is the resolved array, not a pending promise
      let items = await Promise.all(cs.data.items.map(async contact => {
        let result = await messages.getLastMessage(data.tenant_id, contact.item.contact_id, data.hash_id);
        contact.item.last_message = result.code === 200 && result.data.length > 0 ? result.data[0] : null;
        return contact;
      }));
      json.items = items;
      result = json;
    }
    let event = result.authenticated ? 'valid_session' : 'invalid_session';
    console.log('222');
    proxy.send(event, result);
    res.status(201).send({});
  } catch (err) {
    proxy.send('invalid_session', {socket_id: data.socket_id});
    next(err);
  }
});