I set up a website that uses Node.js to read an image from disk and then passes it to an EJS template to display on the page. Sometimes the image appears, but other times the page renders before Node has finished loading the image.
I left the two ways I tried, one commented and the other that was the last one I tried.
This is app.js
// Builds the data-URI for the patient's signature image of a consultation.
//
// Bug fix: the original used fs.access/fs.readFile with callbacks but
// returned `imagens` immediately — the callbacks ran *after* the return, so
// the route handler raced the disk read and often rendered an empty image.
// Callers expect a plain object back synchronously, so the read must be
// synchronous too.
//
// @param {string} id - consultation id (directory name under arquivos/consultas)
// @returns {{assPacUrl: string}} data URI for assPac.png, or '' if absent
function retornaImagens(id) {
    const imagens = {
        assPacUrl: ''
    };
    try {
        const caminho = `${__dirname}\\arquivos\\consultas\\${id}\\assPac.png`;
        // readFileSync with 'base64' returns the file already encoded,
        // ready to embed in an <img src="..."> data URI.
        const base64Image = fs.readFileSync(caminho, 'base64');
        imagens.assPacUrl = `data:image/png;base64, ${base64Image}`;
    } catch (err) {
        // A missing file (ENOENT) is the normal "no signature yet" case and
        // leaves assPacUrl as ''; anything else is worth logging.
        if (err.code !== 'ENOENT') {
            console.log(err);
        }
    }
    return imagens;
}
// GET /consultas/:id/termo — renders the consent-form page with the
// consultation data and the patient's signature image (if present).
app.get('/consultas/:id/termo', (req, res) => {
    const imagens = retornaImagens(req.params.id);
    Consulta.findOne({ link: `/consultas/${req.params.id}/login` })
        .populate('medico paciente')
        .exec((err, consulta) => {
            if (err) {
                console.log(err);
                // Bug fix: the original only logged the error and never
                // answered, leaving the request hanging until the client
                // timed out. Always terminate the request/response cycle.
                res.status(500).send('Internal server error');
                return;
            }
            res.render('consulta/termo', { consulta, id: req.params.id, imagens });
        });
});
This is the ejs file
<img src="<%= imagens.assPacUrl %>">
If you have tips to make the code cleaner and consume less memory, please send me.
The problem was the loading time: reading the image took longer than rendering the page, so the program continued and rendered the website without it. I worked around this by adding a setTimeout:
// Looks up the consultation for the given link and pushes the consent-form
// payload (consultation + images) to every socket in that consultation's room.
function enviaTermo(data) {
    const query = Consulta.findOne({ link: data.link }).populate('medico paciente');
    query.exec((err, consulta) => {
        if (err) {
            console.log(err);
            return;
        }
        console.log(imagens);
        io.to(consulta._id).emit('termo', { consulta, imagens });
    });
}
// NOTE(review): this fixed 450 ms delay is a race-condition workaround — it
// only hopes the image read has finished and does not guarantee it. Making
// the image read synchronous (or awaiting a promise) removes the need for
// this timer entirely.
setTimeout(() => {
enviaTermo(data)
}, 450);
Related
I am running a Express application with Node in the backend. I have 2 functions in a component in NodeJS which I am trying to access from my service. The link for both are the same in the service. It is able to connect one of the functions from the service.
However, it is showing 404 not found for accessing the second function in the same component.
Has anyone faced any such issue and if so how can it be rectified?
EDITED :
Some code for reference :
component1.component.ts
getallprojectcat()
{
this.authenticationService.getprojectcat()
.pipe(first())
.subscribe(
data => {
this.data = data;
},
error => {
this.loading = false;
});
}
}
component2.component.ts
showprojects(moid)
{
this.authenticationService.getprojectslist(moid)
.pipe(first())
.subscribe(
data => {
this.silver = data;
},
error => {
console.log('some error');
this.alertService.error(error);
this.loading = false;
});
}
the .service file
getprojectcat()
{
return this.http.get<any>(this.studenturl+'/getprojectcata/')
.pipe(map(allprojectcat => {
console.log(JSON.stringify(allprojectcat));
return allprojectcat;
}));
}
getprojectslist(moid)
{
return this.http.get(this.studenturl+'/getprojects/'+moid)
.pipe(map(projectslist => {
console.log("Projects List:"+JSON.stringify(projectslist));
return projectslist;
})).catch(this.handleError);
}
Backend .js file
exports.getprojectcata = function(req, res){
console.log("First Function");
};
exports.getprojects = function(req, res){
console.log("Second Function");
};
The function getprojectcata works in the first component. However, the getprojects function in the second component returns a 404 Not Found. I have checked the following things:
Routing does not seem to be the problem as it is moving to the next component without any issues.
We have also tried calling the getprojectscata through the same service in component and it worked.
I am new to Apostrophe and trying to create a contact us form with file attachment in Apostrophe by following the tutorial.
https://apostrophecms.org/docs/tutorials/intermediate/forms.html
I have also created the attachment field in my index.js and it works fine from the admin panel.
Now, I am trying to create my own html for the form with file submission.
// in lib/modules/contact-form-widgets/public/js/always.js
//
// Browser-side player for the contact-form widget: populates the form from
// the widget's submit schema, then converts + posts it to the module's
// 'submit' API on form submission.
apos.define('contact-form-widgets', {
extend: 'apostrophe-widgets',
construct: function(self, options) {
// Called by Apostrophe once per widget instance appearing on the page.
self.play = function($widget, data, options) {
var $form = $widget.find('[data-contact-form]');
var schema = self.options.submitSchema;
var piece = _.cloneDeep(self.options.piece);
// NOTE(review): populate() is passed self.schema / self.piece, but the
// locals above read self.options.submitSchema / self.options.piece. If
// self.schema is undefined, that mismatch would explain the
// "Cannot read property 'serialize' of undefined" stack trace — confirm
// which pair is intended and use it consistently (convert() below uses
// the locals).
return apos.schemas.populate($form, self.schema, self.piece, function(err) {
if (err) {
alert('A problem occurred setting up the contact form.');
return;
}
enableSubmit();
});
// Wires the submit handler; returning false suppresses the browser's
// native form submission.
function enableSubmit() {
$form.on('submit', function() {
submit();
//I can access file here
// console.log($form.find('file'))
return false;
});
}
// Runs convert (DOM -> piece via the schema) then the API post, in order.
// NOTE(review): a plain <input type="file"> that is not a schema field
// will not be serialized by convert(); an 'attachment' schema field is
// the supported path for uploads.
function submit() {
return async.series([
convert,
submitToServer
], function(err) {
if (err) {
alert('Something was not right. Please review your submission.');
} else {
// Replace the form with its formerly hidden thank you message
$form.replaceWith($form.find('[data-thank-you]'));
}
});
// Validates/serializes the form fields into `piece` per the schema.
function convert(callback) {
return apos.schemas.convert($form, schema, piece, callback);
}
// Posts the converted piece to the widget module's 'submit' route.
function submitToServer(callback) {
return self.api('submit', piece, function(data) {
alert("I AM AT SUBMIT API ")
if (data.status === 'ok') {
// All is well
return callback(null);
}
// API-level error
return callback('error');
}, function(err) {
// Transport-level error
alert("I AM HERE AT API ERROR")
return callback(err);
});
}
}
};
}
});
//and my widget.html is
<!-- NOTE(review): this file input is not backed by a schema field, so
     apos.schemas.convert presumably never serializes it and the upload never
     reaches the server — verify against the widget's submitSchema. An
     'attachment' schema field (or apostrophe-pieces-submit-widgets) is the
     supported route for file uploads. -->
<div class="form-group">
<input name="custom-file" type="file">
</div>
When I run this, I get the following errors:
user.js:310 Uncaught TypeError: Cannot read property 'serialize' of undefined
at Object.self.getArea (user.js:310)
at Object.self.getSingleton (user.js:303)
at Object.convert (user.js:686)
at user.js:164
at async.js:181
at iterate (async.js:262)
at async.js:274
at async.js:44
at setImmediate.js:27
at runIfPresent (setImmediate.js:46)
My question is, how do I handle file submission? Is there any better approach for this?
This is much easier to do using the apostrophe-pieces-submit-widgets module, which allows you to define a schema for what the user can submit. You can include a field of type attachment in that, and this is demonstrated in the README.
I want to save an image to my node.js folder.
Save function:
// Uploads the brochure file, then kicks off the image upload and persists
// the brochure record.
//
// Bug fix: the original rejection handler did `return error`, which turned a
// *failed* upload into a fulfilled promise whose value was the error object,
// so callers could never detect the failure. Letting the rejection propagate
// restores normal promise error flow.
//
// @param {object} brochure - record to persist via brochureModel
// @param {object} image - uploaded file descriptor (e.g. from multer)
// @param {object} file - brochure file passed to uploadBrochure
// @returns {Promise} resolves with addBrochure's result; rejects on upload error
function saveBrochure(brochure, image, file) {
    return uploadBrochure(file).then((data) => {
        // NOTE(review): uploadImage is callback-based and fire-and-forget
        // here, so addBrochure may resolve before the image is on disk —
        // consider returning a promise from uploadImage and chaining it.
        uploadImage(image);
        return brochureModel.addBrochure(brochure);
    });
}
upload function:
// Stores an uploaded image (a multer-style file descriptor) under my-path/.
//
// Bug fix: the original called fs.writeFile(path, image, ...), serializing
// the *metadata object* into the destination file instead of the image
// bytes — which is why the saved file was corrupted. The actual bytes live
// in the temp file referenced by image.path, so copy that file instead.
//
// @param {object} image - file descriptor with at least {filename, path}
function uploadImage(image) {
    var destination = 'my-path/' + image.filename;
    fs.copyFile(image.path, destination, function(err) {
        if (err) {
            console.log(err);
        }
    });
}
My image object looks like this
destination:"path"
encoding:"7bit"
fieldname:"image"
filename:"filename"
mimetype:"image/png"
originalname:"filename"
path:"path/filename.png"
size:155217
After the upload, I can see that a corrupted image file has been added to my project.
Any idea what I'm doing wrong?
I am working with the Instagram API in Node.js. I have an array that stores over 20k Instagram ids. I loop over that array, take each id one by one, and fetch its bio, but then I get this error: RequestsLimitError: You just made too many requests to the Instagram API. I tried adding a timeout after every 5 calls, but I still get the same error. How can this be resolved? If anyone knows how to fix it, please let me know.
Here this is my code =>
// ~20k Instagram user ids to look up (abridged in this snippet).
var InstaId = ["12345687",20k more id store here in the array]
// Rolling counter intended to pause after every 5th call.
var changesessionFlage = 0;
// NOTE(review): async.each starts ALL iterations immediately, so the
// setTimeout below only shifts every request by the same fixed delay — the
// 20k calls still hit the API at essentially the same moment, which is why
// RequestsLimitError still fires. Throttling needs async.eachLimit /
// eachSeries or explicit batching instead.
async.each(InstaId, function (id, callback) {
async.parallel([
function (cb) {
if (id) {
// changesessionFlage is read here synchronously, before any request has
// completed, so it is almost always 0 and the 10 s delay is effectively
// never applied.
setTimeout(function () {
Client.Account.getById(sess, id).then(function (bio) {
console.log("changesessionFlage" + changesessionFlage);
changesessionFlage++
//console.log("bio : ", bio._params); // here i am getting bio one by one user
if (changesessionFlage == 6) {
changesessionFlage = 0;
}
cb(null, bio._params);
})
.catch(function (err) {
console.log("get boi: ", err)
// NOTE(review): `bio` is not in scope in this catch handler, so
// bio._params here throws a ReferenceError instead of completing cb.
cb(null, bio._params);
})
}, (changesessionFlage == 5) ? 10000 : 0)
}
}
], function (err, results) {
if (err) {
console.log(err);
return;
}
// NOTE(review): Result is overwritten on every iteration, so only the
// last id's data survives into the export below.
Result = results
callback();
});
}, function (err) {
if (err) {
console.log(err);
return;
}
else {
console.log("Result=>", Result)
if (Result) {
console.log("Result[0]=>", Result[0])
// Export the surviving result row to an .xlsx workbook and stream it
// back as the HTTP response body.
var ws = XLSX.utils.json_to_sheet(Result[0]);
var wb = XLSX.utils.book_new();
XLSX.utils.book_append_sheet(wb, ws, "People");
var wbout = XLSX.write(wb, { bookType: 'xlsx', type: 'binary' });
res.end(wbout, 'binary');
}
}
});
If anyone knows how to fix this issue, please help me.
Your setTimeout is used incorrectly: all API calls are made at once, each after the same 10000 ms delay.
Since this is a one time job, just split the 20K usernames to 4K batches and execute them every hour. This way you will be under the 5k/hr API limit
I am having an issue with concurrent requests that are updating the same document. I'm not using findAndModify() because I need to access the current state of the document to make the update which I don't see supported with findAndModify(). I also would like to avoid using db.fsyncLock() since that locks the entire database and I only need to lock one document in one collection.
First I use findOne() to get a document, then I use the updateOne() in the callback of findOne() to update the same document. When I queue up a bunch of actions and run them all at once I believe they are all accessing the same state when they call findOne() instead of waiting for the updateOne() to complete from the previous action.
How should I handle this?
// Redo handler: pops the next state off UndoableNoteList.future and makes it
// the present, pushing the old present onto past, then writes the new list
// back with $set.
//
// NOTE(review): findOne + updateOne is a read-modify-write with no locking;
// concurrent invocations can read the same snapshot and clobber each other's
// writes — exactly the race described above. Serializing per document (or an
// atomic single-statement update) is required for correctness.
mongoDBPromise.then((db)=> {
db.collection("notes").findOne(
{path: noteId},
(err, result)=> {
if (err) {
console.log(err);
return;
}
// Empty future => nothing to redo.
if (!result.UndoableNoteList.future.length) {
console.log("Nothing to redo");
return;
}
// past    <- past + shallow copy of current present
// present <- first element of future
// future  <- remaining elements of future
let past = result.UndoableNoteList.past.concat(Object.assign({},result.UndoableNoteList.present));
let present = Object.assign({},result.UndoableNoteList.future[0]);
let future = result.UndoableNoteList.future.slice(1, result.UndoableNoteList.future.length);
db.collection("notes").updateOne(
{path: noteId},
{
$set: {
UndoableNoteList: {
past: past,
present: present,
future:future
}
}
},
(err, result)=> {
if (err) {
console.log(err);
return;
}
}
)
}
);
});
As updateOne() is an async call, findOne() won't wait for it to complete and hence there can be situations where the same document is updated simultaneously, which won't be allowed in mongo.
I think updateOne() is not necessary in this case. Note that you have already found the right instance of the document which needs to be updated in findOne() query. Now, you can update that instance and save that document without doing updateOne(). I think the problem can be avoided this way:
// Suggested variant from the answer above: mutate the object returned by
// findOne() in memory and "save the document" instead of issuing a separate
// updateOne().
//
// NOTE(review): with the native MongoDB driver, findOne() returns a plain
// object with no save() method (that is a Mongoose document feature), and
// re-saving a stale snapshot does not remove the read-modify-write race —
// verify before adopting this approach.
mongoDBPromise.then((db)=> {
db.collection("notes").findOne(
{path: noteId},
(err, result)=> {
if (err) {
console.log(err);
return;
}
if (!result.UndoableNoteList.future.length) {
console.log("Nothing to redo");
return;
}
let past = result.UndoableNoteList.past.concat(Object.assign({},result.UndoableNoteList.present));
let present = Object.assign({},result.UndoableNoteList.future[0]);
let future = result.UndoableNoteList.future.slice(1, result.UndoableNoteList.future.length);
result.UndoableNoteList.past = past;
result.UndoableNoteList.present = present;
result.UndoableNoteList.future = future;
//save the document here and return
}
);
});
Hope this answer helps you!
I was not able to find a way to sequentially run the queries using purely mongodb functions. I've written some node.js logic that blocks mongodb queries from running on the same document and adds those queries to a queue. Here's what the code currently looks like.
The Websocket Undo Listener
module.exports = (noteId, wsHelper, noteWebSocket) => {
wsHelper.addMessageListener((msg, ws)=> {
if (msg.type === "UNDO") {
noteWebSocket.broadcast(msg, noteWebSocket.getOtherClientsInPath(noteId, wsHelper));
noteWebSocket.saveUndo(noteId);
}
});
};
The saveUndo function called from the listener
// Queues an UNDO action for the note and kicks the per-note processor.
// addToActionQueue only enqueues; getNoteByIdAndProcessQueue returns early
// if a save for this note is already in flight, so actions batch up safely.
saveUndo(noteId) {
this.addToActionQueue(noteId, {payload: noteId, type: "UNDO"});
this.getNoteByIdAndProcessQueue(noteId);
}
The getNoteByIdAndProcessQueue function called from saveUndo
// Loads the note document and hands its UndoableNoteList to the queue
// processor. isProcessing[noteId] acts as a per-note mutex so only one
// read-process-write cycle runs per note at a time.
getNoteByIdAndProcessQueue(noteId) {
if (this.isProcessing[noteId])return;
this.isProcessing[noteId] = true;
mongoDBPromise.then((db)=> {
db.collection("notes").findOne(
{path: noteId},
(err, result)=> {
if (err) {
// NOTE(review): this retries immediately and unconditionally — a
// persistent findOne error becomes a tight retry loop. Consider a
// bounded retry with backoff.
this.isProcessing[noteId] = false;
this.getNoteByIdAndProcessQueue(noteId);
return;
}
// NOTE(review): a missing document (result === null) would throw here;
// confirm notes always exist before actions are queued for them.
this.processQueueForNoteId(noteId, result.UndoableNoteList);
});
});
}
The processQueueForNoteId function
// Drains the action queue for a note: folds every queued action into the
// in-memory UndoableNoteList in arrival order, then persists the final state
// with a single updateOne. Must only run under the isProcessing[noteId] lock
// taken by getNoteByIdAndProcessQueue.
processQueueForNoteId(noteId, UndoableNoteList) {
this.actionQueue[noteId].forEach((action)=> {
if (action.type === "UNDO") {
UndoableNoteList = this.undoNoteAction(UndoableNoteList);
} else if (action.type === "REDO") {
UndoableNoteList = this.redoNoteAction(UndoableNoteList);
} else if (action.type === "ADD_NOTE") {
UndoableNoteList = this.addNoteAction(UndoableNoteList, action.payload);
} else if (action.type === "REMOVE_NOTE") {
UndoableNoteList = this.removeNoteAction(UndoableNoteList, action.payload);
}
});
// Snapshot what is being saved, then clear the queue so actions arriving
// during the write accumulate separately and trigger the re-run below.
let actionsBeingSaved = this.actionQueue[noteId].concat();
this.actionQueue[noteId] = [];
mongoDBPromise.then((db)=> {
db.collection("notes").updateOne(
{path: noteId},
{
$set: {
UndoableNoteList: UndoableNoteList
}
},
(err, result)=> {
// Release the per-note lock before deciding whether to go again.
this.isProcessing[noteId] = false;
// If the update failed then try again
if (err) {
console.log("update error")
// Re-queue the failed snapshot ahead of anything that arrived
// meanwhile, preserving action order.
this.actionQueue[noteId] = actionsBeingSaved.concat(this.actionQueue[noteId]);
}
// if action were queued during save then save again
if (this.actionQueue[noteId].length) {
this.getNoteByIdAndProcessQueue(noteId);
}
}
)
});
}