Passing an array to a JSON object for Jade rendering - node.js

I have a node.js server written in express and at a certain moment I send to some .jade page an array. The problem is that when rendering the Jade page, the Jade compiler renders the array as [object Object] and the JavaScript compiler on Chrome complains about it saying "Unexpected identifier".
This is the Jade code:
!!! 5
//- Question code: renders a ranking page and injects the server-supplied
//- `ranking` variable into a client-side script.
html(lang="en")
head
title= "Rankings"
body
h1 Ranking
div(id="rankings")
script(type='text/javascript')
function fillRanking(){
// NOTE(review): #{ranking} is the *escaped* interpolation — an object here
// renders as [object Object], which causes the "Unexpected identifier" error.
var rankArray = #{ranking};
alert("inside fillranking");
var divElement = document.getElementById("rankings");
for(var i = 0; i< rankArray.length; i++){
// NOTE(review): "divElements" (plural) looks like a typo for divElement,
// and "" + i+1 concatenates before adding — (i+1) needs parentheses.
divElements.innerHTML += "" + i+1 + ". " + rankArray[i].Username + " " + rankArray[i].Points;
}
}
fillRanking();
As you can see it's really simple, I just fill a div with the info given by what's inside the #{ranking} variable passed by node.js to Jade. The alert on the second line doesn't fire because the Unexpected Identifier error happens as soon as I try to assign the #{ranking} variable.
The following is the code in my node.js with express
// Express route (question code): responds to GET /ranking with a
// Jade-rendered page built from the top-ten ranking results.
app.get('/ranking', function (req, res) {
//get the first ten people in the ranking
var firstTen = getRanking(10, function(results){
//compute the array of results
var result = {
ranking: [],
}
// Copy each query result into result.ranking.
for(var i = 0; i < results.length; i++){
result.ranking[i] = results[i];
}
//render the ranking with all the info
console.log(result);
// NOTE(review): `result` is the wrapper object { ranking: [...] }, not the
// array, and it is not stringified — this is what the answers below fix.
res.render(__dirname + '/pages/ranking/ranking.jade', {
ranking: result,
});
});
});
I create an object with inside an array of results, I put the results I found out of the query inside it and I pass it to the rendering engine. The console.log(results) call prints the result object correctly, for example like this:
{ ranking:
[ { Username: 'usr1',
_id: 4ed27319c1d767f70e000002,
Points: 100 },
{ Username: 'usr2',
_id: 4ed27326c1d767f70e000003,
Points: 100 } ]
}
I don't really know how to handle the variable passed to the Jade page. Whichever thing I do I keep getting the "Unexpected identifier" error.
Does anyone of you know how do I solve this?
Thanks

Looking at the comments above and investigating a little bit more, here's what I've found to work:
Use this on your javascript (/controller):
...
res.render(__dirname + '/pages/ranking/ranking.jade', {
ranking: JSON.stringify(ranking),
})
...
And on jade template:
...
function fillRanking(){
var rankArray = !{ranking};
alert("inside fillranking");
...
This works because !{} doesn't perform escaping.

This works for me.
JSON.stringify the array (or any arbitrary JSON object really) on the server and then JSON.parse it on the client.
server-side:
res.render(__dirname + '/pages/ranking/ranking.jade', {
ranking: JSON.stringify(result),
});
client-side:
var rankArray = JSON.parse( !{JSON.stringify(ranking)} );

Because I also use the array from the controller for iteration, I did this:
res.render('template', pArrayOfData);
And in the jade code:
script(type='text/javascript').
var _histData = !{JSON.stringify(pArrayOfData)};

I encountered the same problem and was able make it work with a slight variation (thanks to the solutions above).
From my code:
Backend:
Stringify the JSON array
// Backend route: hand the raw data object to the view.
router.get('/', function(req, res) {
// The template serializes `data` itself via !{JSON.stringify(data)}, so we
// must pass the raw object; the original also built an unused local
// `var data = JSON.stringify(apiData)` that never reached the template —
// stringifying here AND in the template would double-encode the payload.
res.render('gallery', { data: apiData });
});
Frontend:
Stringify again with the !{}
// Frontend (inside a Jade/Pug script block): `!{...}` interpolates WITHOUT
// escaping, so JSON.stringify runs server-side and injects literal JSON.
function injectDataOnView() {
var data = !{JSON.stringify(data)}; // <====
var divElement = document.getElementById('data');
divElement.innerHTML = data[1]['id']; // <=== just a test, so no for loop
}
injectDataOnView();

Related

Mongodb Nodejs Regex Search

Hey, I am making a website which has a partial search form. Reference: https://www.youtube.com/watch?v=ZC2aRON3fWw&t=42s. But I couldn't understand why it doesn't work. I use pug instead of hbs.
And these are my codes:
// Express route (question code): regex-search articles by title and render
// the first ten matches on the 'sonuc' view.
app.get('/sonuc', function(req, res, next){
var q = req.query.q;
Article.find({
title : {
$regex: new RegExp(q)
}
}, {
// Projection: exclude _id and __v from the returned documents.
_id:0,
__v:0
}, function(err, data){
res.render('sonuc', {data:data})
}).limit(10);
});
// NOTE(review): stray closing "});" below — likely a copy/paste artifact.
});
Then this is my layout pug:
.ui-widget
form.form-inline.my-2.my-lg-0
input.form-control.mr-sm-2(type='text', onkeyup='showResults(this.value)', placeholder='Search',action='/article/'+sorgu, aria-label='Search')
button.btn.btn-secondary.my-2.my-sm-0(type='submit')
#search-results(style='width:60px;top:-1px;position:relative')
In this layout pug I think the onkeyup handler is not working. How can I implement that function on that form?
And this is my main.js, which takes the query from the database and writes it in HTML form:
// Debounced live-search handler (question code): fades the results box
// in/out and fetches matching articles as the user types.
var showResults = debounce(function(arg){
var value = arg.trim();
// NOTE(review): `o` is almost certainly a typo for the number 0, and
// "fadOut" should be "fadeOut" — both will throw at runtime.
if(value == "" || value.length <= o){
$("#search-results").fadOut();
return;
}else{
$("#search-results").fadeIn();
};
var jqhr = $.get('/article/' + value, function(data){
})
.done(function(data){
if(data.length == 0){
// NOTE(review): "search-resuts" is misspelled and missing the leading
// '#', so this selector matches nothing (same on the branch below).
$("search-resuts").append('<p classs="lead text-center mt-2">No Results</p>');
}else{
data.forEach(x => {
$("search-resuts").append('<p class="m-2 lead"><img style="width:60px;" src="images/supreme1.jpg">' + x.title +'</p>');
});
}
})
.fail(function(err){
console.log(err);
})
// NOTE(review): the dangling "200" was probably meant to be the debounce
// wait argument, i.e. debounce(fn, 200).
}); 200;
/**
 * Returns a debounced wrapper around `func`: a call is delayed until `wait`
 * ms have passed without another call. With `immediate` truthy, `func` fires
 * on the leading edge instead, and further calls are suppressed until the
 * timer expires.
 */
function debounce(func, wait, immediate){
var timeout;
return function(){
var context = this;
// Fix: declare `args` locally — the original assigned an implicit global.
var args = arguments;
var later = function(){
timeout= null;
if(!immediate)func.apply(context,args);
};
var callNow = immediate && !timeout;
clearTimeout(timeout);
timeout = setTimeout(later, wait);
if(callNow)func.apply(context,args);
};
};
I cannot understand these issues and why it doesn't work. As a summary, I want to make a search engine which works with regex and MongoDB. It will be a partial search, as shown in the YouTube video referenced above. But the real issue is that I couldn't understand the code block of the showResults function and I don't know how to translate this code to my project, so I am waiting for your help. I cannot upload a video to this website, so if you can give me your Facebook, Instagram or email account I can send you the issue which should be solved. I need your help. I have been working on this project for a long time for my school but I cannot move on. Please, I need your help.
I hope I could express myself well and your helps will solve it.
Yes, I got it now. I have made lots of changes instead of using these code blocks.
I watched: https://www.youtube.com/watch?v=9_lKMTXVk64
And also I documented : Fuzzy Searching with Mongodb?
These two documentations help me a lot. So that my code is almost approximately 90% same as shown in these documentations.
This is my app.js :
// Express route (answer code): case-insensitive fuzzy title search,
// rendered on the index view.
app.get('/sonuc', function(req, res){
if (req.query.search) {
// Escape user input before building the regex to avoid injection/ReDoS.
const regex = new RegExp(escapeRegex(req.query.search), 'gi');
Article.find({ "title": regex }, function(err, articles) {
if(err) {
console.log(err);
} else {
res.render("index", {
articles: articles
});
}
});
}
// NOTE(review): when `req.query.search` is absent, no response is ever
// sent — the request hangs. An else branch should answer it.
});
This is my index.pug file :
extends layout
//- index.pug: lists each matching article as a link to its detail page.
block content
body
br
br
br
ul.list-group
each article, i in articles
li.list-group-item
a(href="/article/" + article._id)= article.title
And this is my layout.pug file(But I will consider the text form)
//- Search form submitting GET /sonuc.
//- Fix: the input's attribute list contained `name,="search"` — the stray
//- comma is a Pug syntax error; the attribute must be `name="search"`.
//- NOTE(review): `action` on an input is meaningless and `sorgu` looks
//- undefined here — confirm whether that attribute should be removed.
form.form-inline.my-2.my-lg-0(action="/sonuc" method="GET")
input.form-control.mr-sm-2(type='text', name="search", onkeyup='showResults(this.value)', placeholder='Search',action='/article/'+sorgu, aria-label='Search')
button.btn.btn-secondary.my-2.my-sm-0(type='submit')
Please look at:
https://github.com/nax3t/fuzzy-search
Because in my solution I didn't add the message that is shown when we cannot find the word, or when there is no such word searched by the user — the noMatch query.
But after I will apply it to my project I will add it to here.

Having difficulties with node.js res.send() loop

I'm attempting to write a very basic scraper that loops through a few pages and outputs all the data from each url to a single json file. The url structure goes as follows:
http://url/1
http://url/2
http://url/n
Each of the urls has a table, which contains information pertaining to the ID of the url. This is the data I am attempting to retrieve and store inside a json file.
I am still extremely new to this and having a difficult time moving forward. So far, my code looks as follows:
// Scrape route (question code): loops over a range of page IDs, scrapes
// each page, and tries to respond with the collected data.
app.get('/scrape', function(req, res){
var json;
// NOTE(review): request() is asynchronous, so this loop does not wait —
// res.send(json) below runs before any callback fires, and the outer
// `json` is shadowed by the inner `var json`, so it stays undefined.
// Also `url` is assigned without var/let, creating an implicit global.
for (var i = 1163; i < 1166; i++){
url = 'https://urlgoeshere.com' + i;
request(url, function(error, response, html){
if(!error){
var $ = cheerio.load(html);
var mN, mL, iD;
var json = { mN : "", mL : "", iD: ""};
$('html body div#wrap h2').filter(function(){
var data = $(this);
mN = data.text();
json.mN = mN;
})
$('table.vertical-table:nth-child(7)').filter(function(){
var data = $(this);
mL = data.text();
json.mL = mL;
})
$('table.vertical-table:nth-child(8)').filter(function(){
var data = $(this);
iD = data.text();
json.iD = iD;
})
}
// NOTE(review): `var i` is function-scoped, so by the time this callback
// runs, i has already advanced past the loop's final value.
fs.writeFile('output' + i + '.json', JSON.stringify(json, null, 4), function(err){
console.log('File successfully written! - Check your project directory for the output' + i + '.json file');
})
});
}
res.send(json);
})
app.listen('8081')
console.log('Magic happens on port 8081');
exports = module.exports = app;
When I run the code as displayed above, the output within the output.json file only contains data for the last url. I presume that's because I attempt to save all the data within the same variable?
If I include res.send() inside the loop, so the data writes after each page, I receive the error that multiple headers cannot be sent.
Can someone provide some pointers as to what I'm doing wrong? Thanks in advance.
Ideal output I would like to see:
Page ID: 1
Page Name: First Page
Color: Blue
Page ID: 2
Page Name: Second Page
Color: Red
Page ID: n
Page Name: Nth Page
Color: Green
I can see a number of problems:
Your loop doesn't wait for the asynchronous operations in the loop, thus you do some things like res.send() before the asynchronous operations in the loop have completed.
Inappropriate use of cheerio's .filter().
Your json variable is constantly being overwritten so it only has the last data in it.
Your loop variable i would lose its value by the time you tried to use it in the fs.writeFile() statement.
Here's one way to deal with those issues:
const rp = require('request-promise');
const fsp = require('fs').promises;
// Async route handler (answer code): scrape each page sequentially, write
// one JSON file per page, and respond with the accumulated array.
app.get('/scrape', async function(req, res) {
let data = [];
for (let i = 1163; i < 1166; i++) {
const url = 'https://urlgoeshere.com/' + i;
try {
// await pauses the loop for each request, giving proper sequencing.
const html = await rp(url)
const $ = cheerio.load(html);
const mN = $('html body div#wrap h2').first().text();
const mL = $('table.vertical-table:nth-child(7)').first().text();
const iD = $('table.vertical-table:nth-child(8)').first().text();
// create object for this iteration of the loop
const obj = {iD, mN, mL};
// add this object to our overall array of all the data
data.push(obj);
// write a file specifically for this invocation of the loop
await fsp.writeFile('output' + i + '.json', JSON.stringify(obj, null, 4));
console.log('File successfully written! - Check your project directory for the output' + i + '.json file');
} catch(e) {
// stop further processing on an error
console.log("Error scraping ", url, e);
res.sendStatus(500);
return;
}
}
// send all the data we accumulated (in an array) as the final result
res.send(data);
});
Things different in this code:
Switch over all variable declarations to let or const
Declare route handler as async so we can use await inside.
Use the request-promise module instead of request. It has the same features, but returns a promise instead of using a plain callback.
Use the promise-based fs module (in latest versions of node.js).
Use await in order to serialize our two asynchronous (now promise-returning) operations so the for loop will pause for them and we can have proper sequencing.
Catch errors and stop further processing and return an error status.
Accumulate an object of data for each iteration of the for loop into an array.
Change .filter() to .first().
Make the response to the request handler be a JSON array of data.
FYI, you can tweak the organization of the data in obj however you want, but the point here is that you end up with an array of objects, one for each iteration of the for loop.
EDIT Jan, 2020 - request() module in maintenance mode
FYI, the request module and its derivatives like request-promise are now in maintenance mode and will not be actively developed to add new features. You can read more about the reasoning here. There is a list of alternatives in this table with some discussion of each one. I have been using got() myself and it's built from the beginning to use promises and is simple to use.

node's module function return value empty/undefined?

I'm trying to get the HTML-encoded table row value returned from the SQLite-based logger. As I'm new to Node modules, I'm stuck at:
// SQLite-backed logger module (question code).
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database(':memory:');
var html = '';
module.exports = {
readHtml: function() {
var html = ''; // optional but does not work here as well
db.serialize(function() {
// db.each is asynchronous: the row callback below runs AFTER readHtml
// has already returned, so the accumulated html is never seen here.
db.each("SELECT rowid AS id, info FROM logger", function(err, row) {
html = html + '<tr><td>' + row.info + '<td><tr>'; << html is growing
console.log('Log: ' + row.info); << working
});
});
console.log(html); // html gets empty here!
return html;
}
}
So have no value returned from:
var sysLog = require('logger');
sysLog.init();
sysLog.write('test string1');
sysLog.write('test string2');
console.log(sysLog.readHtml());
It has to be very simple to be solved ...
node is 6.7
Your problem is directly related to a very common issue when starting with JavaScript:
How do I return the response from an asynchronous call?
Which shows the simplest way to receive results of an asynchronous operation, such as db.each is using a callback.
// Annotated sketch (deliberately incomplete code) of why html is empty:
// db.each is asynchronous, so readHtml returns before any row is appended.
function readHtml()
var html = ''
db.serialize(function() {
db.each(..., function(err, row) {
// this line executes sometime later
// after we already returned from readHtml()
});
});
// this line executes right after the call to db.serialize
// but before db.each calls the callback we give to it.
// At this point, html is '' because we still didn't read any rows
// (they are read asynchronously, sometime later)
return html;
}
readHtml(); // <-- this is always '' because rows are read at a later point
To solve this, you would need a function that will be called with a callback like this:
readHtml(function(html) { // <-- this callback gets called once all rows are read
console.log(html);
});
Your situation also has an additional complication that db.each calls its callback once for every row. By looking at the docs, you can see that db.each accepts an additional complete callback when all rows are read. You can use this callback to signalize reading is done and pass the html results.
Here's how you can define readHtml:
// Reads every logger row and builds an HTML fragment of table rows.
// Because the rows arrive asynchronously, the finished string is delivered
// through `callback` once db.each signals completion, not as a return value.
function readHtml(callback) {
var fragments = [];
db.serialize(function() {
db.each("SELECT rowid AS id, info FROM logger", function(err, row) {
// Collect one row's worth of markup per callback invocation.
fragments.push('<tr><td>' + row.info + '<td><tr>');
}, function() {
// Completion callback: all rows have been read — hand back the HTML.
callback(fragments.join(''));
});
});
}

convert mongoose stream to array

I have worked with mongodb but quite new to mongoose ORM. I was trying to fetch data from a collection and the explain() output was showing 50ms. the overall time it was taking to fetch the data via mongoose was 9 seconds. Here is the query:
Node.find({'dataset': datasetRef}, function (err, nodes){
// handle error and data here
});
Then I applied index on the field I was querying on. The explain() output now showed 4ms. But the total time to retrieve data via mongoose did not change. Then i searched a bit and found that using lean() can help bring the performance of read queries in mongoose quite close to native mongodb
So I changed my query to:
Node.find({'dataset': datasetRef})
.lean()
.stream({transform: JSON.stringify})
.pipe(res)
This solved the performance issues completely. But the end result is a stream of JSON docs like this:
{var11: val11, var12: val12}{var21: val21, var22: val22} ...
How do I parse this to form an array of docs ? Or should I not be using stream at all ? In my opinion, there is no point using a stream if I am planning to form the array at backend, since I will then have to wait for all the docs to be read into memory. But I also think that parsing and creating the whole array at front end might be costly.
How can I achieve best performance in this case without clogging the network as well ?
UPDATE
I am trying to solve this problem using a through stream. However, I am not able to insert commas in between the JSON objects yet. See the code below:
// Attempt 1 (question code): stream docs to the response, using a
// through-stream to turn "}{" boundaries into "},{" and bracket the output.
res.write("[");
var through = require('through');
var tr = through(
function write(data){
// NOTE(review): each chunk is a single stringified doc, so the pattern
// "}{" never appears within one chunk — see UPDATE 2 below.
this.queue(data.replace(/\}\{/g,"},{"));
}
);
var dbStream = db.node.find({'dataset': dataSetRef})
.lean()
.stream({'transform': JSON.stringify});
dbStream.on("end", function(){
res.write("]");
});
dbStream
.pipe(tr)
.pipe(res);
With this, I am able to get the "[" in the beginning and "]" at the end. However, still not able to get patten "}{" replaced with "},{". Not sure what am I doing wrong
UPDATE 2
Now figured out why the replace is not working. It appears that since I have specified the transform function as JSON.stringify, it reads one JSON object at a time and hence never encounter the pattern }{ since it never picks multiple JSON elements at a time.
Now I have modified my code, and written a custom transform function which does JSON.stringify and then appends a comma at the end. The only problem I am facing here is that I don't know when it is the last JSON object in the stream. Because I don't wanna append the comma in that case. At the moment, I append an empty JSON object once the end is encountered. But somehow this does not look like a convincing idea. Here is the code:
// Attempt 2 (question code): append a comma after every stringified doc,
// then close the array with a dummy empty object to absorb the last comma.
res.write("[");
function transform(data){
return JSON.stringify(data) + ",";
}
var dbStream = db.node.find({'dataset': dataSetRef})
.lean()
.stream({'transform': transform});
dbStream.on("end", function(){
// The trailing comma needs a final element, hence the placeholder {}.
res.write("{}]");
});
dbStream
.pipe(res);
The only problem I am facing here is that I don't know when it is the last JSON object in the stream.
But you do know which one is first. Knowing that, instead of appending the comma, you can prepend it to every object except the first one. In order to do that, set up your transform function inside a closure:
/**
 * Builds a transform for streaming JSON documents as an array body: the
 * first document is emitted as-is, every later one is prefixed with a
 * comma. The "have we emitted yet" state lives in the closure, so each
 * stream gets its own fresh transform by calling transformFn().
 */
function transformFn(){
var emittedAny = false;
return function(data) {
var json = JSON.stringify(data);
if (emittedAny) {
return "," + json;
}
emittedAny = true;
return json;
}
}
Now you can just call that function and set it as your actual transform.
// Wire the closure-based transform into the stream and bracket the output.
var transform = transformFn();
res.write("[");
var dbStream = db.node.find({'dataset': dataSetRef})
.lean()
.stream({'transform': transform});
dbStream.on("end", function(){
// Close the JSON array once the stream is exhausted.
res.write("]");
});
dbStream
.pipe(res);
#cbajorin and #rckd both gave correct answers.
However, repeating this code all the time seems like a pain.
Hence my solution uses an extra Transform stream to achieve the same thing.
import { Transform } from 'stream'
/**
 * Transform stream that frames incoming (already stringified) chunks as a
 * JSON array: '[' before the first chunk, ',' before each later chunk and
 * ']' at flush time — or a bare '[]' when no chunks arrived at all.
 */
class ArrayTransform extends Transform {
constructor(options) {
super(options);
this._index = 0;
}

_transform(data, encoding, done) {
const isFirst = this._index === 0;
this._index += 1;
// Open the array on the first chunk, separate the rest with commas.
this.push(isFirst ? '[' : ',');
this.push(data);
done();
}

_flush(done) {
const sawNothing = this._index === 0;
this._index += 1;
// An empty stream never emitted '[', so produce a complete empty array.
this.push(sawNothing ? '[]' : ']');
done();
}
}
Which in turn can be used as:
// Usage: pipe the lean query stream through the array-framing transform.
const toArray = new ArrayTransform();
Model.find(query).lean().stream({transform: JSON.stringify })
.pipe(toArray)
.pipe(res)
EDIT: added check for empty
I love #cdbajorin's solution, so i created a more readable version of it (ES6):
// Same idea as an inline IIFE: the immediately-invoked arrow returns a
// stateful per-stream transform that prefixes '[' or ',' as appropriate.
Products
.find({})
.lean()
.stream({
transform: () => {
let index = 0;
return (data) => {
// First doc opens the array; later docs get a comma separator.
return (!(index++) ? '[' : ',') + JSON.stringify(data);
};
}() // invoke
})
.on('end', () => {
// Close the array once the stream is exhausted.
res.write(']');
})
.pipe(res);
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/shoppingdb');
var Sports = mongoose.model('sports', {});
var result = [];
var prefix_out = "your info";
// Stream matching docs with a cursor, decorate each one, and collect them.
Sports.find({"goods_category": "parts"}).
cursor().
on("data", function(doc){
//stream ---> string
var str = JSON.stringify(doc)
//string ---> JSON
var json = JSON.parse(str);
//handle Your Property
json.handleYourProperty = prefix_out + json.imageURL;
// Bug fix: push the processed document — the original did
// result.push(result), pushing the array into itself and discarding json.
result.push(json);
}).
on('error', function(err){
console.log(err);
}).
on('close', function(){
console.log(result);
});

Filtering Markdown content in Jade / Node.js

Esteemed Hackers,
I wish to filter a string full of Markdown within a Jade template.
I have the Markdown in a variable.
Jade interpolates a variable inside the Markdown just fine:
This:
// Demonstration: a variable interpolated INSIDE a :markdown filter block is
// substituted, so plain-text values like "Second!" come through converted.
var jade = require('jade');
var jade_string = [
':markdown',
' ## This is markdown!',
' * First',
' * #{var2}',
' * Third'
].join('\n');
var fn = jade.compile( jade_string, { pretty: true } );
console.log( fn( { var1: "First!", var2: "Second!" } ) );
Begets this:
<h2>This is markdown!</h2>
<ul>
<li>First</li>
<li>Second!</li>
<li>Third</li>
</ul>
However, what I have is actual complete Markdown inside the variable.
And this:
// Counter-example: the :markdown filter runs at compile time, before
// interpolation — markdown supplied *via* the variable is not converted.
var jade = require('jade');
var jade_string = [
'div.markedup',
' :markdown',
' \\#{var2}'
].join('\n');
var fn = jade.compile( jade_string, { pretty: true } );
var markdown = [
'## I am marked!',
'* One',
'* Two'
].join('\n');
console.log( fn( { var1: "First!", var2: markdown } ) );
Delivers only this:
<div class="markedup"><p>## I am marked!
* One
* Two</p>
</div>
Thus it looks to me like Jade filters the block before doing any variable
interpolation, then interpolates variables in the resulting HTML. This is
fine if you wish to write your templates in Markdown, but it's not much
help if you want to write your content in Markdown.
I know I can solve this problem with more programming but I feel like I
must be missing something. After all, holding snippets of Markdown content in
a database and stuffing the resulting HTML fragments into templates seems like
the most obvious use-case for a :markdown filter.
Is there a "normal" way to do this in Jade?
Many thanks in advance for the pending enlightenment.
I think the answer is more programming, but I'll show you what I do. I use custom middleware that lets me combine arbitrary transformative processes before I get to my final HTML document output. So, for example, I have the following filters in my middleware.js module, which I will explain in turn.
So simple views just use normal jade with its various filters for markdown, javascript, coffeescript. Some views, for example a blog post, require a more sophisticated middleware chain, which goes like this.
First, based on the request, I establish the file that holds the core content for this response, and set that as a property on res.viewPath. This could be a raw HTML fragment file or a markdown file. Then I send the response through a series of middleware transformations. I use res.html and res.dom to store intermediate representations of the response as it is being built up.
This one just stores raw HTML (just a document body fragment with no head or layout).
// Middleware: if the resolved view is a raw .html fragment, load it into
// res.html; otherwise fall through to the next handler untouched.
html = function(req, res, next) {
if (!/\.html$/.test(res.viewPath)) return next();
return fs.readFile(res.viewPath, "utf8", function(error, htmlText) {
res.html = htmlText;
return next(error);
});
};
This one will convert a markdown file to HTML (using the markdown-js module).
// Middleware: if the view is a .md file, convert it to HTML (markdown-js)
// and stash the result on res.html for later stages.
markdownToHTML = function(req, res, next) {
if (!/\.md$/.test(res.viewPath)) return next();
return fs.readFile(res.viewPath, "utf8", function(error, markdownText) {
res.html = markdown(markdownText);
return next(error);
});
};
I have a sub-layout that goes within my master layout but around each blog post. So I wrap the blog post in the sublayout here. (Separate code not shown generates the res.post object from a json metadata file).
// Middleware: wrap the already-rendered post body (res.html) in the blog
// sub-layout, passing the post metadata prepared by earlier middleware.
blogArticle = function(req, res, next) {
var footerPath, post;
post = res.post;
footerPath = path.join(__dirname, "..", "templates", "blog_layout.jade");
return fs.readFile(footerPath, "utf8", function(error, jadeText) {
var footerFunc;
if (error) return next(error);
footerFunc = jade.compile(jadeText);
res.html = footerFunc({
post: post,
body: res.html
});
return next();
});
};
Now I wrap my layout around the main content HTML. Note that I can set things like the page title here, or wait until later since I can manipulate the response via jsdom after this. I do body: res.html || "" so I can render an empty layout and insert the body later if that is more convenient.
// Middleware: wrap res.html in the master layout. Falls back to an empty
// body ("") so the layout can be rendered first and filled in later.
exports.layout = function(req, res, next) {
var layoutPath;
layoutPath = path.join(__dirname, "..", "templates", "layout.jade");
return fs.readFile(layoutPath, "utf8", function(error, jadeText) {
var layoutFunc, locals;
layoutFunc = jade.compile(jadeText, {
filename: layoutPath
});
locals = {
config: config,
title: "",
body: res.html || ""
};
res.html = layoutFunc(locals);
return next(error);
});
};
Here's where the really powerful stuff comes. I convert the HTML string into a jsdom document object model which allows for jQuery based transformations on the server side. The toMarkup function below just allows me to get the HTML back without the extra <script> tag for our in-memory jquery which jsdom has added.
// Middleware: parse res.html into a jsdom DOM (with jQuery injected) so
// later middleware can run server-side jQuery transformations on it.
exports.domify = function(req, res, next) {
return jsdom.env(res.html, [jqueryPath], function(error, dom) {
if (error) return next(error);
res.dom = dom;
// toMarkup strips the injected jQuery <script> before serializing back.
dom.toMarkup = function() {
this.window.$("script").last().remove();
return this.window.document.doctype + this.window.document.innerHTML;
};
return next(error);
});
};
So here's a custom transformation I do. This can replace a made-up DSL tag like <flickrshow href="http://flickr.com/example"/> with real valid HTML, which otherwise would be a big nasty <object> boilerplate I would have to duplicate in each blog post, and if flickr ever changed the boilerplate markup they use, it would be a maintenance pain to go fix it in many individual blog post markdown files. The boilerplate they currently use is in the flickrshowTemplate variable and contains a little mustache placeholders {URLs}.
// Middleware: expand each custom <flickrshow href="..."/> tag into the
// real flickr embed boilerplate (flickrshowTemplate, {URLs} filled in).
exports.flickr = function(req, res, next) {
var $ = res.dom.window.$;
$("flickrshow").each(function(index, elem) {
var $elem, URLs;
$elem = $(elem);
URLs = $elem.attr("href");
return $elem.replaceWith(flickrshowTemplate.replace(/\{URLs\}/g, URLs));
});
return next();
};
Ditto for embedding a youtube video. <youtube href="http://youtube.com/example"/>.
// Middleware: expand each custom <youtube href="..."/> tag into the
// youtube embed boilerplate (youtubeTemplate, {URL} filled in).
exports.youtube = function(req, res, next) {
var $ = res.dom.window.$;
$("youtube").each(function(index, elem) {
var $elem, URL;
$elem = $(elem);
URL = $elem.attr("href");
return $elem.replaceWith(youtubeTemplate.replace(/\{URL\}/, URL));
});
return next();
};
Now I can change the title if I want, add/remove javascripts or stylesheets, etc. Here I set the title after the layout has already been rendered.
// Middleware: set the document <title> from the post metadata after the
// layout has already been rendered into the DOM.
postTitle = function(req, res, next) {
var $;
$ = res.dom.window.$;
$("title").text(res.post.title + " | Peter Lyons");
return next();
};
OK, time to go back to final HTML.
// Middleware: serialize the (possibly transformed) DOM back into res.html.
exports.undomify = function(req, res, next) {
res.html = res.dom.toMarkup();
return next();
};
Now we ship it off!
// Final middleware: ship the accumulated HTML to the client.
exports.send = function(req, res) {
return res.send(res.html);
};
To tie it all together in order and have express use it, we do
// Ordered middleware chain for a blog post: load metadata → render body →
// sub-layout → master layout → DOM transforms → serialize → send.
postMiddleware = [
loadPost,
html,
markdownToHTML,
blogArticle,
layout,
domify,
postTitle,
flickr,
youtube,
undomify,
send
]
app.get("/your/uri", postMiddleware);
Concise? No. Clean? I think so. Flexible? Extremely. Blazingly fast? Probably not wicked fast as I believe jsdom is one of the more heavyweight things you can do, but I'm using this as a static site generator so speed is irrelevant. Of course, it would be trivial to add another function to the begining and end of the middleware chain to write the final HTML to a static file and serve it directly if it is newer than the corresponding markdown page body content file. Stackoverflowers, I'd love to hear thoughts and suggestions on this approach!
Jade people say passing variables to filters is not supported. I couldn’t get through Peter Lyons’s answer, so I used this:
# CoffeeScript: configure marked and expose it as an app-wide template
# local so Jade templates can call md(...) to render markdown at runtime.
marked = require 'marked'
marked.setOptions
<my options>
app.locals.md = marked
Then in Jade:
!= md(<markdown string>)
Quick and dirty. Probably suboptimal because it runs conversion each time without caching the result (I think), but at least it works.
(Edit)
You could also use marked to render markdown in the browser, offloading some work from your server and making loading faster.

Resources