Show image with Express, instead of downloading it - node.js

A simple server I'm working on can serve images. When browsing to the image URL directly, Chrome offers to download the image rather than just showing it in the browser. Why is that? Presumably it is something in the headers?
The relevant code is this:
tilestore.getTile(req.param("z"), req.param("x"), req.param("y"), function(err, tile) {
    if (!err) {
        res.send(tile);
    } else {
        res.send("Tile rendering error: " + err + "\n");
    }
});
Do I need to add something to the headers? They are currently as follows:
HTTP/1.1 200 OK
Server: nginx/1.4.6 (Ubuntu)
Date: Mon, 01 Sep 2014 07:22:30 GMT
Content-Type: application/octet-stream
Content-Length: 37779
Connection: keep-alive
X-Powered-By: Express
ETag: W/"9393-1937584155"

Ok, it's easy. Just set the content-type:
if (!err) {
    res.contentType('image/png');
    res.send(tile);
} else {
    res.send("Tile rendering error: " + err + "\n");
}
These images are always .png.
New headers:
HTTP/1.1 200 OK
Server: nginx/1.4.6 (Ubuntu)
Date: Mon, 01 Sep 2014 07:33:22 GMT
Content-Type: image/png
Content-Length: 37779
Connection: keep-alive
X-Powered-By: Express
ETag: W/"9393-1937584155"

Just ran into this same problem, and found that modifying the content type alone was not enough. This answer, attempting to do the opposite, provided the solution. I also needed to add a content-disposition header:
res.set("Content-Disposition", "inline;");
Along with setting the correct content type, this allowed me to display my image rather than downloading it.
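For reference, here is a rough sketch of the question's handler with both headers set; it assumes the same tilestore callback and that the tiles are always PNGs:
tilestore.getTile(req.param("z"), req.param("x"), req.param("y"), function(err, tile) {
    if (!err) {
        // declare what the bytes are and ask the browser to display them inline
        res.set("Content-Type", "image/png");
        res.set("Content-Disposition", "inline");
        res.send(tile);
    } else {
        res.send("Tile rendering error: " + err + "\n");
    }
});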

I had the same problem with SVG image files, and it turned out that I had to change
'content-type': 'image/svg'
to
'content-type': 'image/svg+xml'
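As a quick illustration, a minimal sketch of an Express handler serving an SVG inline; svgMarkup is a hypothetical variable holding the SVG file contents:
app.get('/image.svg', function(req, res) {
    // 'image/svg' alone is not a registered type; browsers expect 'image/svg+xml'
    res.set('Content-Type', 'image/svg+xml');
    res.send(svgMarkup); // hypothetical string containing the SVG document
});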

Related

Azure Functions: NodeJS - HTTP Response Renders as XML Rather than HTTP Response

I have an Azure function written in NodeJS where I'm attempting to cause an HTTP redirect with a 302. The documentation is very sparse on what the valid entries in the response are. As a result, I've created an object with what I feel should be the correct entries to generate the redirect, but all I get is an XML response. Even items like the status code are shown in the XML rather than changing the real status code.
What am I doing wrong?
My Code:
module.exports = function(context, req) {
    var url = "https://www.google.com";
    context.res = {
        status: 302,
        headers: {
            Location: url
        }
    };
    context.done();
};
This is the response I'm getting in the browser:
HTTP/1.1 200 OK
Cache-Control: no-cache
Pragma: no-cache
Content-Length: 1164
Content-Type: application/xml; charset=utf-8
Expires: -1
Server: Microsoft-IIS/8.0
X-AspNet-Version: 4.0.30319
X-Powered-By: ASP.NET
Date: Wed, 18 Jan 2017 00:54:20 GMT
Connection: close
<ArrayOfKeyValueOfstringanyType xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/2003/10/Serialization/Arrays"><KeyValueOfstringanyType><Key>status</Key><Value xmlns:d3p1="http://www.w3.org/2001/XMLSchema" i:type="d3p1:int">302</Value></KeyValueOfstringanyType><KeyValueOfstringanyType><Key>headers</Key><Value i:type="ArrayOfKeyValueOfstringanyType"><KeyValueOfstringanyType><Key>Location</Key><Value xmlns:d5p1="http://www.w3.org/2001/XMLSchema" i:type="d5p1:string">https://www.google.com</Value></KeyValueOfstringanyType></Value></KeyValueOfstringanyType></ArrayOfKeyValueOfstringanyType>
The problem is that you're not defining the "body" in the response. This can be set to null, but it must be set for Azure Functions to interpret the response properly.
e.g. Update your code to:
module.exports = function(context, req) {
    var url = "https://www.google.com";
    context.res = {
        status: 302,
        headers: {
            Location: url
        },
        body: {}
    };
    context.done();
};
You will then get the desired response:
HTTP/1.1 302 Found
Cache-Control: no-cache
Pragma: no-cache
Content-Length: 0
Expires: -1
Location: https://www.google.com
Server: Microsoft-IIS/8.0
X-AspNet-Version: 4.0.30319
X-Powered-By: ASP.NET
Date: Wed, 18 Jan 2017 01:10:13 GMT
Connection: close
Edited 2/16/2017 - Using "null" for the body currently throws an error on Azure. As a result the answer was updated to use {} instead.
This is a bug with Azure Functions; see this content headers issue.
For now, a workaround is to remove any content-related headers if the body of your response is null.
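A rough sketch of that workaround, assuming you really do want a null body: keep anything content-related (Content-Type, Content-Length) out of the headers you set.
module.exports = function(context, req) {
    context.res = {
        status: 302,
        headers: {
            // only Location here; no Content-Type or Content-Length,
            // which is what trips the content-headers bug when body is null
            Location: "https://www.google.com"
        },
        body: null
    };
    context.done();
};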

Emotion API Project Oxford base64 image

I am trying to make a call to the Emotion API via JavaScript within a PhoneGap app. I encoded the image into base64 and verified that the data can be decoded by one of the online tools. This is the code that I found on the web to use.
var apiKey = "e371fd4333ccad2"; // (you can get a free key on the site; this one is modified here)
//apiUrl: The base URL for the API. Find out what this is for other APIs via the API documentation
var apiUrl = "https://api.projectoxford.ai/emotion/v1.0/recognize";
"file" is the base64 string.
function CallAPI(file, apiUrl, apiKey)
{
    // console.log("file=> " + file);
    $.ajax({
        url: apiUrl,
        beforeSend: function (xhrObj) {
            xhrObj.setRequestHeader("Content-Type", "application/octet-stream");
            xhrObj.setRequestHeader("Ocp-Apim-Subscription-Key", apiKey);
        },
        type: "POST",
        data: file,
        processData: false
    })
    .done(function (response) {
        console.log("in call api a");
        ProcessResult(response);
    })
    .fail(function (error) {
        console.log(error.getAllResponseHeaders());
    });
}

function ProcessResult(response)
{
    console.log("in processrult");
    var data = JSON.stringify(response);
    console.log(data);
}
I got back this:
Expires: -1
Access-Control-Allow-Origin: *
Pragma: no-cache
Cache-Control: no-cache
Content-Length: 60
Date: Fri, 01 Apr 2016 13:34:32 GMT
Content-Type: application/json; charset=utf-8
X-Powered-By: ASP.NET
So I tried their console test page:
https://dev.projectoxford.ai/docs/services/5639d931ca73072154c1ce89/operations/563b31ea778daf121cc3a5fa/console
I can put in an image URL like "example.com/man.jpg" and it works great, but if I take the same image and have it base64-encoded, all I get is "Bad Body". I have tried it both as content type "application/octet-stream" and "application/json" and get the same error. A sample of the encoded data and the HTTP request looks like this:
POST https://api.projectoxford.ai/emotion/v1.0/recognize HTTP/1.1
Content-Type: application/octet-stream
Host: api.projectoxford.ai
Ocp-Apim-Subscription-Key: ••••••••••••••••••••••••••••••••
Content-Length: 129420
data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAASABIAAD/...
I get back:
Pragma: no-cache
Cache-Control: no-cache
Date: Fri, 01 Apr 2016 16:23:09 GMT
X-Powered-By: ASP.NET
Content-Length: 60
Content-Type: application/json; charset=utf-8
Expires: -1
{
    "error": {
        "code": "BadBody",
        "message": "Invalid face image."
    }
}
I am now not sure whether you can send an image like this from JavaScript. Can anyone tell me if my JavaScript is correct, or whether it is possible to send a base64-encoded image to the service?
Thanks for your help,
tim
This API does not accept data URIs for images. What you'll need to do is convert it to a binary blob. Though this answer is for a different Project Oxford API, you can apply the same technique.
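A hedged sketch of that conversion in browser JavaScript, reusing the question's file, apiUrl and apiKey; the base64 payload of the data URI is decoded into a Blob, and the Blob is what gets posted instead of the string:
function dataUriToBlob(dataUri) {
    var parts = dataUri.split(',');
    var mime = parts[0].match(/:(.*?);/)[1];   // e.g. "image/jpeg"
    var binary = atob(parts[1]);               // decode the base64 payload
    var bytes = new Uint8Array(binary.length);
    for (var i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
    }
    return new Blob([bytes], { type: mime });
}

// post the binary blob rather than the base64 string
$.ajax({
    url: apiUrl,
    type: "POST",
    data: dataUriToBlob(file),
    processData: false,
    contentType: "application/octet-stream",
    headers: { "Ocp-Apim-Subscription-Key": apiKey }
});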

Node js with express how to enable cache for static files

I have tried all the possibilities. Following are my response headers:
HTTP/1.1 200 OK
Expires: Sun, 20 Dec 2015 03:53:42 GMT
Accept-Ranges: bytes
Cache-Control: public, max-age=604800
Last-Modified: Wed, 02 Dec 2015 15:49:21 GMT
Etag: W/"25571-15163623fb3"
Content-Type: image/png
Content-Length: 152945
Date: Sun, 13 Dec 2015 03:53:42 GMT
Connection: keep-alive
But it's still not returning a 304 status. If I do a command-refresh in Firefox, it downloads the image again with a 200 response code. I don't want that: when the user does a command-refresh it should return a 304 status and serve the image from the browser cache instead of downloading a new file.
Following is my Node.js code:
var expireTime = 604800000;
app.all('/css*', function(req, res, next) {
    res.header('Expires', new Date(Date.now() + expireTime).toUTCString());
    next();
});
app.use('/css', express.static(conf.nodePath + 'css', { maxAge: expireTime }));
Can anyone guide me on how to enable proper caching? Thanks in advance.

Show function provides json

My CouchDB show function will not run the provides('json', ...) function. It will run the html provides in certain cases though. Here is the show function:
function(doc, req) {
    provides('json', function() {
        return {'json': doc};
    });
    provides('html', function() {
        return "<html><body>html string here</body></html>";
    });
    return {'json': {
        'hello': "goodbye"
    }};
}
Here is a sample request when sending Accept: text/x-json; hello:goodbye is also returned if I use Accept: application/json.
dave@ubuntu-laptop:~/py/liqc$ curl -i -H "Accept: text/x-json" http://127.0.0.1:8001/liqc/user-dave
HTTP/1.1 200 OK
Content-Length: 20
Vary: Accept
Server: CouchDB/1.0.2 (Erlang OTP/R14B)
ETag: "6V7EMSS64ZQ5SRLI0EYQVDWES"
Cache-Control: must-revalidate
Date: Mon, 27 Jan 2014 15:54:31 GMT
Content-Type: text/plain;charset=utf-8, text/x-json
{"hello":"goodbye"}
When I request text/html, I also get hello:goodbye. If I remove the final return of the show function however, application/json will continue to give me hello:goodbye, but text/html will give me the results I want!
dave@ubuntu-laptop:~/py/liqc$ curl -i -H "Accept: text/html" http://127.0.0.1:8001/liqc/user-dave
HTTP/1.1 200 OK
Content-Length: 42
Vary: Accept
Server: CouchDB/1.0.2 (Erlang OTP/R14B)
ETag: "9B8K3XGK28Y7RL2ART28WLL50"
Date: Mon, 27 Jan 2014 16:02:41 GMT
Content-Type: text/html; charset=utf-8
<html><body>html string here</body></html>
Am I doing something wrong, or is this something going on with CouchDB? I am running a localhost reverse proxy to Cloudant, BTW. Thanks for any help.
You're not supposed to use a final return if you use provides. return supersedes any provides.
Plus, what do you expect to get when requesting JSON while your show function provides JSON in two different places? Use only provides and you will be fine.
About this:
If I remove the final return of the show function however, application/json will continue to give me hello:goodbye
There is no way you can get "hello":"goodbye" if you completely removed the final return. Maybe you forgot to update the design document? Debugging the wrong source code can be very frustrating…
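For completeness, a minimal sketch of the show function with the final return dropped, so each Accept type is handled only by its provides() callback:
function(doc, req) {
    // served for Accept: application/json (and text/x-json)
    provides('json', function() {
        return {'json': doc};
    });
    // served for Accept: text/html
    provides('html', function() {
        return "<html><body>html string here</body></html>";
    });
    // no final return -- it would supersede both provides() handlers
}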

Node.js: chunked transfer encoding

Is this code valid HTTP/1.1?
var fs = require('fs')
var http = require('http')

var buf = function(res, fd, i, s, buffer) {
    if (i + buffer.length < s) {
        fs.read(fd, buffer, 0, buffer.length, i, function(e, l, b) {
            res.write(b.slice(0, l))
            //console.log(b.toString('utf8', 0, l))
            i = i + buffer.length
            buf(res, fd, i, s, buffer)
        })
    }
    else {
        fs.read(fd, buffer, 0, buffer.length, i, function(e, l, b) {
            res.end(b.slice(0, l))
            fs.close(fd)
        })
    }
}

var app = function(req, res) {
    var head = {'Content-Type': 'text/html; charset=UTF-8'}
    switch (req.url.slice(-3)) {
        case '.js': head = {'Content-Type': 'text/javascript'}; break;
        case 'css': head = {'Content-Type': 'text/css'}; break;
        case 'png': head = {'Content-Type': 'image/png'}; break;
        case 'ico': head = {'Content-Type': 'image/x-icon'}; break;
        case 'ogg': head = {'Content-Type': 'audio/ogg'}; break;
        case 'ebm': head = {'Content-Type': 'video/webm'}; break;
    }
    head['Transfer-Encoding'] = 'chunked'
    res.writeHead(200, head)
    fs.open('.' + req.url, 'r', function(err, fd) {
        fs.fstat(fd, function(err, stats) {
            console.log('.' + req.url + ' ' + stats.size + ' ' + head['Content-Type'] + ' ' + head['Transfer-Encoding'])
            var buffer = new Buffer(100)
            buf(res, fd, 0, stats.size, buffer)
        })
    })
}

http.createServer(app).listen(8000, "127.0.0.1")
console.log('GET http://127.0.0.1:8000/appwsgi/www/index.htm')
I think I am violating HTTP/1.1 here. Text files do seem to work fine, but that could be coincidental. Should my status be "200 OK", or does it need to be "100"? Is one header sufficient?
If you're doing chunked transfer encoding, you actually need to set that header:
Transfer-Encoding: chunked
You can see this from the headers returned by Google, which does chunked transfers for the homepage and most likely other pages:
HTTP/1.1 200 OK
Date: Sat, 04 Jun 2011 00:04:08 GMT
Expires: -1
Cache-Control: private, max-age=0
Content-Type: text/html; charset=ISO-8859-1
Set-Cookie: PREF=ID=f9c65f4927515ce7:FF=0:TM=1307145848:LM=1307145848:S=fB58RFtpI5YeXdU9; expires=Mon, 03-Jun-2013 00:04:08 GMT; path=/; domain=.google.com
Set-Cookie: NID=47=UiPfl5ew2vCEte9JyBRkrFk4EhRQqy4dRuzG5Y-xeE---Q8AVvPDQq46GYbCy9VnOA8n7vxR8ETEAxKCh-b58r7elfURfiskmrOCgU706msiUx8L9qBpw-3OTPsY-6tl; expires=Sun, 04-Dec-2011 00:04:08 GMT; path=/; domain=.google.com; HttpOnly
Server: gws
X-XSS-Protection: 1; mode=block
Transfer-Encoding: chunked
EDIT: Yikes, that read is way too complicated:
var app = function(req, res) {
    var head = {'Content-Type': 'text/html'}
    switch (req.url.slice(-3)) {
        case '.js': head = {'Content-Type': 'text/javascript'}; break;
        case 'css': head = {'Content-Type': 'text/css'}; break;
        case 'png': head = {'Content-Type': 'image/png'}; break;
        case 'ico': head = {'Content-Type': 'image/x-icon'}; break;
        case 'ogg': head = {'Content-Type': 'audio/ogg'}; break;
        case 'ebm': head = {'Content-Type': 'video/webm'}; break;
    }
    res.writeHead(200, head)
    var file_stream = fs.createReadStream('.' + req.url);
    file_stream.on("error", function(exception) {
        console.error("Error reading file: ", exception);
    });
    file_stream.on("data", function(data) {
        res.write(data);
    });
    file_stream.on("close", function() {
        res.end();
    });
}
There you go, a nice streamed buffer for you to write with. Here's a blog post I wrote on different ways to read in files; I recommend looking it over so you can see how best to work with files in Node's asynchronous environment.
Since Node.js implicitly sets 'Transfer-Encoding: chunked', all I needed to send in the headers was the content type with a charset, like:
'Content-Type': 'text/html; charset=UTF-8'
Initially it was:
'Content-Type': 'text/html'
... which didn't work. Specifying "charset=UTF-8" immediately forced Chrome to render chunked responses.
Why are you doing all the fs operations manually? You'd probably be better off using the fs.createReadStream() function.
On top of that, my guess is that Chrome is expecting you to return a 206 response code. Check req.headers.range, and see if Chrome is expecting a "range" of the media file to be returned. If it is, then you will have to only send back the portion of the file requested by the web browser.
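If range support turns out to be the issue, here is a rough sketch of honoring a Range header with fs.createReadStream, assuming stats.size is already known as in the original fstat callback:
var range = req.headers.range;                 // e.g. "bytes=0-" or "bytes=100-199"
if (range) {
    var parts = range.replace(/bytes=/, '').split('-');
    var start = parseInt(parts[0], 10);
    var end = parts[1] ? parseInt(parts[1], 10) : stats.size - 1;
    res.writeHead(206, {
        'Content-Range': 'bytes ' + start + '-' + end + '/' + stats.size,
        'Accept-Ranges': 'bytes',
        'Content-Length': end - start + 1,     // start and end are inclusive
        'Content-Type': head['Content-Type']
    });
    fs.createReadStream('.' + req.url, { start: start, end: end }).pipe(res);
} else {
    res.writeHead(200, head);
    fs.createReadStream('.' + req.url).pipe(res);
}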
But why reinvent the wheel? There are tons of Node modules that do this sort of thing for you. Try Connect/Express's static middleware, sketched below. Good luck!
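A minimal sketch of that static-middleware setup, assuming the files live under ./appwsgi/www as in the question's log line:
var express = require('express');
var app = express();

// serves the files, sets content types, and handles chunking and Range requests for you
app.use(express.static(__dirname + '/appwsgi/www'));

app.listen(8000);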
