In Node.js I can do:
var http = require('http')
var url = 'mylinkedsite.com'
http.get({ host: url }, function(){...})
In Dart I'm trying the same:
import 'dart:io';
import 'dart:uri';
var url = 'mylinkedsite.com';
var client = new HttpClient();
var uri = new Uri.fromString(url);
var connection = client.getUrl(uri);
connection.onResponse = (res){...};
But I'm unable to get the same result without explicitly adding url.scheme and url.path, both of which are final. What's the simplest way to accomplish the same result?
The HttpClient in dart:io supports strings as hostnames:
var conn = httpClient.get("mylinkedsite.com", 80, "/");
conn.onResponse = (HttpClientResponse response) {
  if (response.statusCode != 200) {
    // unexpected return code
  } else {
    // success, parse response
  }
};
conn.onError = (e) => completer.completeException(e);
The only other option is an http helper method that hides the Uri calls behind a more Node-like interface.
Fairly noobish at AES-ECB encryption, apologies in advance.
I am trying to make an API call to a server that requires an encrypted payload.
I'm having a little trouble with the sequence in which I encrypt my data and convert between Base64 and ASCII.
When I run the code below, I keep getting
The header content contains invalid characters.
I suspect it may be the way I'm converting the encrypted data between types, but I'm not entirely sure. Any thoughts would be really appreciated.
var request = require("request");
var aesjs = require('aes-js');
var apiKey = "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX";
var privateKey = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
var method = 'GET';
var url = 'https://apiurl.com';
var agent = "app /1.0 Android/8.1.0 (Android)";
var options = {
  method: method,
  url: url,
  headers: {
    'X-API-KEY': apiKey,
    'X-CSN-USER-AGENT': agent,
    'apiData': GetAndEncryptApiData(method, url)
  }
};
request(options, function (error, response, body) {
  if (error) throw new Error(error);
  console.log(body);
});
function GetAndEncryptApiData(method, url) {
  var isoTimestamp = new Date().toISOString();
  var text = `uri:${url} \nmethod:${method}\ntimestamp:${isoTimestamp}`;
  var key = Buffer.from(privateKey, 'base64');
  var aesEcb = new aesjs.ModeOfOperation.ecb(key);
  var textToBytes = aesjs.utils.utf8.toBytes(text);
  var paddedData = aesjs.padding.pkcs7.pad(textToBytes);
  var encryptedData = aesEcb.encrypt(paddedData);
  var output = Buffer.from(encryptedData).toString('ascii');
  return output;
}
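One likely cause (an assumption on my part, not something confirmed in the post) is the final conversion: decoding raw AES output as ASCII produces control characters, which are not valid in HTTP header values. Encoding the ciphertext as Base64 instead keeps the header printable, e.g.:
// Hypothetical variant of the last step of GetAndEncryptApiData:
// Base64-encode the ciphertext so the 'apiData' header only contains
// printable characters (the server would then Base64-decode before decrypting).
var output = Buffer.from(encryptedData).toString('base64');
return output;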
I'm trying to create a simple HTTP/2 server and want to make use of Http2Stream in the http2 module to push large assets. How can I incorporate it in my Koa2 app? Currently my server is just a middleware that receives the ctx and next objects, checks if the file exists, and tries to send it out.
async server(ctx, next, ...arg) {
  // check if file exists
  // if it exists, set the headers and mimetype and send file
}
Does the ctx object contain the functions needed to work with Http2Stream, or how can I extend it?
You can utilize the stream in ctx.res (which is the original Node response) like so: ctx.res.stream
Working example: Koa2 with HTTP/2. This one gets a file from the public folder (the filename is hardcoded here) and sends it through the stream (which then should be the Http2Stream). Just type https://localhost:8080/file in your browser. You need to place a file thefile.html into ./public:
'use strict';
const fs = require('fs');
const http2 = require('http2');
const koa = require('koa');

const app = new koa();

const options = {
  key: fs.readFileSync('./key.pem'),
  cert: fs.readFileSync('./cert.pem'),
  passphrase: 'test'
};

function getFile(path) {
  const filePath = `${__dirname}/public/${path}`;
  try {
    const content = fs.openSync(filePath, 'r');
    const contentType = 'text/html';
    return {
      content,
      headers: {
        'content-type': contentType
      }
    };
  } catch (e) {
    return null;
  }
}

// response
app.use(ctx => {
  if (ctx.request.url === '/file') {
    const file = getFile('thefile.html');
    ctx.res.stream.respondWithFD(file.content, file.headers);
  } else {
    ctx.body = 'OK';
  }
});

const server = http2.createSecureServer(options, app.callback());
console.log('Listening on port 8080');
server.listen(8080);
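Since the question specifically mentions pushing assets, here is a rough sketch (not part of the original answer; the asset path, file location and headers are illustrative assumptions) of how the same ctx.res.stream could also be used for HTTP/2 server push:
// Sketch only: push an extra asset on the underlying Http2Stream before
// responding to the main request, e.g.
// pushAsset(ctx.res.stream, '/style.css', `${__dirname}/public/style.css`, 'text/css');
function pushAsset(stream, urlPath, filePath, contentType) {
  if (!stream || !stream.pushAllowed) return;
  stream.pushStream({ ':path': urlPath }, (err, pushStream) => {
    if (err) return;
    // respondWithFile opens the file and streams it on the pushed stream.
    pushStream.respondWithFile(filePath, { 'content-type': contentType });
  });
}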
Hope that helps
I know there is a very similar question here: Testing download links with Nightwatch.js, but this case is slightly different.
I'm trying to test a download using Nightwatch too, but it's not that simple due to the browser behaviour, which presents a popup window to the user.
What I'm trying to do is just download the file with Node, not via Selenium (as in this Python version: https://stackoverflow.com/a/24998532/447074).
So I made this custom assertion:
downloadFile.js:
var util = require('util');
var http = require('http');
var url = require('url');

exports.assertion = function(options, expected_status, msg) {
  this.message = msg || util.format('Testing if authenticated file download works');
  this.file_url = options.file_url;
  this.cookie_content = options.cookie_content;
  this.expected = expected_status;
  this.pass = function(value) {
    return this.expected == value;
  };
  this.value = function(response) {
    return response.statusCode;
  };
  this.command = function(callback) {
    var options = url.parse(this.file_url);
    options.headers = {
      'Cookie': this.cookie_content
    };
    http.get(options, callback);
    return true;
  };
};
And using it in a test case like this:
// I would not use this here, but I added this for this post...
"Get cookie": function(browser) {
  var settings = browser.globals;
  var state = browser.globals.state;
  browser
    .getCookies(function(response) {
      state.sessionid = "sessionid=" + response['value'].filter(function(cookie_value) {
        return cookie_value['name'] == 'sessionid';
      })[0]['value'];
    });
},

"An authenticated user can download the file": function(browser) {
  var settings = browser.globals;
  var state = browser.globals.state;
  browser
    .assert.downloadFile({file_url: "http://example.com/somefile.pdf", cookie_content: state.sessionid}, 200, "Testing Authenticated download");
},

"An unauthenticated user can not download the file": function(browser) {
  var settings = browser.globals;
  var state = browser.globals.state;
  browser
    .assert.downloadFile({file_url: "http://example.com/somefile.pdf", cookie_content: "sessionid= wrong-session;"}, 302, "Testing Unauthenticated download");
},
It's "almost" working, but somehow the test "stops" after the first test, just after the first downloadFile assertion.
Here is the result from the console
nightwatch -c ./nightwatch.json --test tests/documents/upload_test.js
[Upload Test] Test Suite
========================
Setting up...
Running: Get cookie
No assertions ran.
Running: An authenticated user can download the file
✔ Testing Authenticated dowload
Is it OK to make this kind of assertion in Nightwatch?
Why does it stop the tests?
Thanks!
OK, I found the solution by reading the custom assertions doc again: http://nightwatchjs.org/guide#custom-assertions
The command function of the assertion needs to return this instead of true; otherwise the runner stops after the assertion, which is exactly the behaviour described above.
Here is the downloadFile.js assertion:
var util = require('util');
var http = require('http');
var url = require('url');

exports.assertion = function(options, expected_status, msg) {
  this.message = msg || util.format('Testing if authenticated file download works');
  this.file_url = options.file_url;
  this.cookie_content = options.cookie_content;
  this.expected = expected_status;
  this.pass = function(value) {
    return this.expected == value;
  };
  this.value = function(response) {
    return response.statusCode;
  };
  this.command = function(callback) {
    var options = url.parse(this.file_url);
    options.headers = {
      'Cookie': this.cookie_content
    };
    http.get(options, callback);
    return this;
  };
};
I'm trying to upload a file and send params in the same request. It's possible with FileTransfer, but I have a problem on the server side: req.body is always empty. I'm using the formidable module.
This is the client side:
upload = function (imageURI) {
  var ft = new FileTransfer(),
      options = new FileUploadOptions();
  options.fileKey = "file";
  options.fileName = 'filename.jpg'; // We will use the name auto-generated by Node at the server side.
  options.mimeType = "image/jpeg";
  options.chunkedMode = false;

  var params = {};
  params.value1 = "test";
  params.value2 = "param";
  options.params = params;

  alert(imageURI);
  ft.upload(imageURI, serverURL + "/upload",
    function (e) {
      getFeed();
    },
    function (e) {
      alert("Upload failed");
    }, options);
},
This is the server side:
var form = new formidable.IncomingForm();
form.parse(req, function(error, fields, files) {
  console.log(req.body.value1);
  console.log("Traitement terminé"); // "Processing complete"
});
I found the problem. I had to replace
console.log(req.body.value1);
with
console.log(fields.value1);
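For context, here is a minimal sketch of the corrected server-side handler (the route path and responses are assumptions for illustration): the multipart text params sent via options.params arrive in formidable's fields, and the uploaded image in files, never in req.body.
var formidable = require('formidable');

app.post('/upload', function (req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (error, fields, files) {
    if (error) {
      res.writeHead(500);
      return res.end();
    }
    // Text params from options.params end up in `fields`, not req.body.
    console.log(fields.value1, fields.value2);
    // The uploaded image is in files.file (matching options.fileKey = "file").
    res.writeHead(200);
    res.end('ok');
  });
});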
I have a Windows 8 application connecting to a web service written in Node.js. On the Windows 8 side I compressed my request body with gzip, but on the Node.js side I found that my req.body type was Object.
I cannot use zlib to uncompress the body since it's not a stream.
I can use zlib to uncompress the req, but I don't know how to retrieve the req.body content from the unzipped stream and parse the body in JSON format.
BTW, I reviewed my request through Fiddler and it told me the request body was gzipped, and I can see my raw body through Fiddler after unzipping, so the request should be correct.
Updated
Below is my Node.js app:
(function () {
  var express = require("express");
  var zlib = require("zlib");

  var app = express();
  var port = 12345;

  app.configure(function () {
    app.use(express.compress());
    app.use(express.bodyParser());
  });

  app.post("/test", function (req, res) {
    var request = req.body;
    req.pipe(zlib.createGunzip());
    var response = {
      status: 0,
      value: "OK"
    };
    res.send(200, response);
  });

  console.log("started at port %d", port);
  app.listen(port);
})();
And below is my Windows Store app code (partial):
private async void button1_Click_1(object sender, RoutedEventArgs e)
{
    var message = new
    {
        Name = "Shaun",
        Value = "12345678901234567890123456789012345678901234567890"
    };
    var json = await JsonConvert.SerializeObjectAsync(message, Formatting.Indented);
    var bytes = Encoding.UTF8.GetBytes(json);

    var client = new HttpClient();
    client.BaseAddress = new Uri("http://192.168.56.1:12345/");
    client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    client.DefaultRequestHeaders.ExpectContinue = false;
    var jsonContent = new JsonContent(message);
    var gzipContent = new GZipContent3(jsonContent);
    var res = await client.PostAsync("test", gzipContent);

    var dialog = new Windows.UI.Popups.MessageDialog(":)", "完成"); // "完成" = "Done"
    await dialog.ShowAsync();
}

internal class GZipContent3 : ByteArrayContent
{
    public GZipContent3(HttpContent content)
        : base(LoadGZipBytes(content))
    {
        //base.Headers.ContentType = content.Headers.ContentType;
        base.Headers.ContentType = new MediaTypeHeaderValue("x-application/x-gzip");
        base.Headers.ContentEncoding.Add("gzip");
    }

    private static byte[] LoadGZipBytes(HttpContent content)
    {
        var source = content.ReadAsByteArrayAsync().Result;
        byte[] buffer;
        using (var outStream = new MemoryStream())
        {
            using (var gzip = new GZipStream(outStream, CompressionMode.Compress, true))
            {
                gzip.Write(source, 0, source.Length);
            }
            buffer = outStream.ToArray();
        }
        return buffer;
    }
}

internal class JsonContent : StringContent
{
    private const string defaultMediaType = "application/json";

    public JsonContent(string json)
        : base(json)
    {
        var mediaTypeHeaderValue = new MediaTypeHeaderValue(defaultMediaType);
        mediaTypeHeaderValue.CharSet = Encoding.UTF8.WebName;
        base.Headers.ContentType = mediaTypeHeaderValue;
    }

    public JsonContent(object content)
        : this(GetJson(content))
    {
    }

    private static string GetJson(object content)
    {
        if (content == null)
        {
            throw new ArgumentNullException("content");
        }
        var json = JsonConvert.SerializeObject(content, Formatting.Indented);
        return json;
    }
}
See http://www.senchalabs.org/connect/json.html. Basically you need to write your own middleware, based on connect.json(), that pipes the request through a decompression stream: like connect.compress() (http://www.senchalabs.org/connect/compress.html), but in the opposite direction.
Also, make sure you're sending the correct Content-Encoding header in your request.
If you show me what you have so far I can help you further.
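A rough sketch of what such middleware could look like (the function name and the req._body interaction are assumptions about the connect/express version in use), registered with app.use(gunzipJson) before express.bodyParser() in the app.configure block above:
var zlib = require('zlib');

// Decompress a gzipped JSON request body before the JSON body parser runs.
function gunzipJson(req, res, next) {
  if (req.headers['content-encoding'] !== 'gzip') return next();
  var gunzip = zlib.createGunzip();
  var chunks = [];
  req.pipe(gunzip);
  gunzip.on('data', function (chunk) { chunks.push(chunk); });
  gunzip.on('end', function () {
    try {
      req.body = JSON.parse(Buffer.concat(chunks).toString('utf8'));
      req._body = true; // assumption: connect's body parsers skip bodies that are already handled
      next();
    } catch (e) {
      next(e);
    }
  });
  gunzip.on('error', next);
}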
I was working on a similar thing and finally landed on:
function getGZipped(req, callback) {
  var gunzip = zlib.createGunzip();
  req.pipe(gunzip);

  var buffer = [];
  gunzip.on('data', function (data) {
    // decompression chunk ready, add it to the buffer
    buffer.push(data);
  }).on('end', function () {
    // decompression complete: concatenate the buffered chunks and parse the JSON
    callback(null, JSON.parse(Buffer.concat(buffer).toString()));
  }).on('error', function (e) {
    callback(e);
  });
}
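Hypothetical usage inside the /test route from the question (the response shape simply mirrors the one the question already sends):
app.post("/test", function (req, res) {
  getGZipped(req, function (err, body) {
    if (err) {
      return res.send(400);
    }
    // body is the decompressed, parsed JSON payload
    res.send(200, { status: 0, value: "OK" });
  });
});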