Creating signed URLs for Google Cloud Storage using Node.js

I'm trying to create a signature for a privately stored file in Google Cloud Storage; so that I can distribute a time-limited link.
I'm currently doing this, and it produces a signature that's too short ... where am I going wrong?
var crypto = require("crypto");
var ttl = new Date().getTime() + 3600;
var id = 'the_target_file.txt';
var bucketName = 'bucket_name';
var POLICY_JSON = "GET\n" + "\n" + "\n" + ttl + "\n" + '/' + bucketName + '/' + id;
// stringify and encode the policy
var stringPolicy = JSON.stringify(POLICY_JSON);
var base64Policy = Buffer(stringPolicy, "utf-8").toString("base64");
// sign the base64 encoded policy
var privateKey = "MY_PRIVATE_KEY";
var sha256 = crypto.createHmac("sha256", privateKey);
var signature = sha256.update(new Buffer(base64Policy, "utf-8")).digest("base64");
console.log ( signature );

There is an API/module for getting signed URLs now.
module: https://www.npmjs.com/package/@google-cloud/storage
API docs: https://googleapis.dev/nodejs/storage/latest/File.html#getSignedUrl
Example
var storage = require('@google-cloud/storage')();
var myBucket = storage.bucket('my-bucket');
var file = myBucket.file('my-file');

//-
// Generate a URL that allows temporary access to download your file.
//-
var request = require('request');

var config = {
  action: 'read',
  expires: '03-17-2025' // this could also include time (MM-DD-YYYYTHH:MM:SSZ)
};

file.getSignedUrl(config, function(err, url) {
  if (err) {
    console.error(err);
    return;
  }

  // The file is now available to read from this URL.
  request(url, function(err, resp) {
    // resp.statusCode = 200
  });
});

Realised what I was doing wrong ... I was hashing the policy string instead of signing it.
The below code now gives me the correct output.
var crypto = require("crypto");
var fs = require("fs");
var expiry = Math.floor(Date.now() / 1000) + 3600; // Expires must be in seconds since the epoch (one hour from now), matching the PHP time() version below
var key = 'the_target_file';
var bucketName = 'bucket_name';
var accessId = 'my_access_id';
var stringPolicy = "GET\n" + "\n" + "\n" + expiry + "\n" + '/' + bucketName + '/' + key;
var privateKey = fs.readFileSync("gcs.pem","utf8");
var signature = encodeURIComponent(crypto.createSign('sha256').update(stringPolicy).sign(privateKey,"base64"));
var signedUrl = "https://" + bucketName + ".commondatastorage.googleapis.com/" + key +"?GoogleAccessId=" + accessId + "&Expires=" + expiry + "&Signature=" + signature;
console.log(signedUrl);
For completeness ... here is a PHP version that does the same thing, which I used to check my results
$expiry = time() + 3600;
$key = 'the_target_file';
$bucketName = 'bucket_name';
$accessId = 'my_access_id';
$stringPolicy = "GET\n\n\n".$expiry."\n/".$bucketName."/".$key;
$fp = fopen('gcs.pem', 'r');
$priv_key = fread($fp, 8192);
fclose($fp);
$pkeyid = openssl_get_privatekey($priv_key,"password");
if (openssl_sign( $stringPolicy, $signature, $pkeyid, 'sha256' )) {
    $signature = urlencode( base64_encode( $signature ) );
    echo 'https://'.$bucketName.'.commondatastorage.googleapis.com/'.
         $key.'?GoogleAccessId='.$accessId.'&Expires='.$expiry.'&Signature='.$signature;
}

Assuming this question is about signing a CDN URL backed by a Google bucket backend, here is what works for me (the code above did not work for me).
Options and signing function call:
const signUrlOptions = {
  expires: Math.floor(Date.now() / 1000) + 3600, // one hour from now, in seconds since the epoch
  keyName: '_SIGNING_KEY_NAME_', // URL signing key name (the one you created on the CDN backend bucket)
  keyBase64: '_SIGNING_KEY_BASE64_', // the URL signing key content (base64-encoded, 128-bit value, ~24 characters)
  baseUrl: '_CDN_BASE_URL_' // your base CDN URL (can be an IP like http://123... in a dev env, or https://cdn_dns_name)
};
const signedUrl = signCdnUrl('demo.png', signUrlOptions);
signing function:
import { createHmac } from 'crypto';

const BASE64_REPLACE = { '+': '-', '/': '_', '=': '' };

export function signCdnUrl(fileName, opts) {
  // URL to sign
  const urlToSign = `${opts.baseUrl}/${fileName}?Expires=${opts.expires}&KeyName=${opts.keyName}`;
  // Compute signature
  const keyBuffer = Buffer.from(opts.keyBase64, 'base64');
  let signature = createHmac('sha1', keyBuffer).update(urlToSign).digest('base64');
  signature = signature.replace(/[+/=]/g, c => (<any>BASE64_REPLACE)[c]); // might be a better way
  // Add signature to urlToSign and return signedUrl
  return urlToSign + `&Signature=${signature}`;
}
Hope this helps. Somehow the Google Cloud docs don't have a Node.js example, and file.getSignedUrl() adds confusion to the mix since it is not related to CDN URL signing.
Note: you probably want to move the base64 -> Buffer conversion to the caller and pass it in as opts.keyBuffer, as sketched below.
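For reference, here is a minimal sketch of that variant, assuming the same signCdnUrl shape as above; only the key decoding moves out of the function:

import { createHmac } from 'crypto';

// Variant of the function above where the caller passes an already-decoded opts.keyBuffer.
export function signCdnUrl(fileName, opts) {
  const urlToSign = `${opts.baseUrl}/${fileName}?Expires=${opts.expires}&KeyName=${opts.keyName}`;
  const signature = createHmac('sha1', opts.keyBuffer)
    .update(urlToSign)
    .digest('base64')
    .replace(/\+/g, '-')
    .replace(/\//g, '_')
    .replace(/=/g, '');
  return `${urlToSign}&Signature=${signature}`;
}

// Caller decodes the base64 signing key once (e.g. at startup) instead of on every call:
const opts = {
  expires: Math.floor(Date.now() / 1000) + 3600, // one hour from now, in seconds
  keyName: '_SIGNING_KEY_NAME_',
  keyBuffer: Buffer.from('_SIGNING_KEY_BASE64_', 'base64'),
  baseUrl: '_CDN_BASE_URL_'
};
const url = signCdnUrl('demo.png', opts);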

If the Node.js @google-cloud/storage library is already part of your project, then the best way is to use it. The code below is from the Google Cloud Storage SDK docs for Node.js (link here).
npm install @google-cloud/storage
function main(bucketName = 'your_bucket_name', filename = 'your_file_path_without_bucket_name') {
  const {Storage} = require('@google-cloud/storage');

  // Creates a client (parameters not required if you are already in a GCP environment)
  const storage = new Storage({
    projectId: 'your_project_id',
    keyFilename: './json_key_path_for_some_service_account.json'
  });

  async function generateV4ReadSignedUrl() {
    // These options will allow temporary read access to the file
    const options = {
      version: 'v4',
      action: 'read',
      expires: Date.now() + 15 * 60 * 1000, // 15 minutes
    };

    // Get a v4 signed URL for reading the file
    const [url] = await storage
      .bucket(bucketName)
      .file(filename)
      .getSignedUrl(options);

    console.log('Generated GET signed URL:');
    console.log(url);
    console.log('You can use this URL with any user agent, for example:');
    console.log(`curl '${url}'`);
  }

  generateV4ReadSignedUrl().catch(console.error);
  // [END storage_generate_signed_url_v4]
}

main(...process.argv.slice(2));
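If you also need to let a client upload an object, the same getSignedUrl call supports a write action. A minimal sketch, with placeholder bucket and file names, following the same pattern as the read example above:

// Sketch: a v4 signed URL that lets a client PUT an object (names/paths are placeholders).
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();

async function generateV4UploadSignedUrl(bucketName, filename) {
  const options = {
    version: 'v4',
    action: 'write',
    expires: Date.now() + 15 * 60 * 1000, // 15 minutes
    contentType: 'application/octet-stream',
  };

  const [url] = await storage.bucket(bucketName).file(filename).getSignedUrl(options);

  // The client must PUT with the same Content-Type that was used when signing.
  console.log(`curl -X PUT -H 'Content-Type: application/octet-stream' --upload-file my-file '${url}'`);
  return url;
}

generateV4UploadSignedUrl('your_bucket_name', 'your_file_path').catch(console.error);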

Related

Use original file name in AWS s3 uploader

I have implemented an S3 uploader per these instructions: https://aws.amazon.com/blogs/compute/uploading-to-amazon-s3-directly-from-a-web-or-mobile-application/
This is the Lambda function code:
AWS.config.update({ region: process.env.AWS_REGION })
const s3 = new AWS.S3()
const URL_EXPIRATION_SECONDS = 300

// Main Lambda entry point
exports.handler = async (event) => {
  return await getUploadURL(event)
}

const getUploadURL = async function(event) {
  const randomID = parseInt(Math.random() * 10000000)
  const Key = `${randomID}.jpg`

  // Get signed URL from S3
  const s3Params = {
    Bucket: process.env.UploadBucket,
    Key,
    Expires: URL_EXPIRATION_SECONDS,
Currently the filename (key) is generated using a random ID.
I would like to change that to use the original filename of the uploaded file.
I tried a couple of approaches, such as using fs.readFile() to get the filename, but have not had any luck.
There is a webpage with a form that works in conjunction with the Lambda to upload the file to s3.
How do I get the filename?
If you want to save the file with the original filename, you have to pass that filename as part of the key you use to request the signed url. You don't show how you're getting the file to upload, but if it is part of a web site, you get this from the client.
On the client side you have the user identify the file to upload and pass that to your code that calls getUploadURL(). Maybe in your code it is part of event? Then you send the signed URL back to the client and then the client can send the file to the signed URL.
Therefore to upload a file, your client has to send two requests to your server -- one to get the URL and one to upload the file.
You do mention that you're using fs.readFile(). If you're able to get the file with this call, then you already have the file name. All you have to do is pass the same name to getUploadURL() as an additional parameter or as part of event. You may have to parse the filename first, or within getUploadURL(), if it includes a path to someplace other than your current working directory.
The code above looks like it may be a Lambda that's getting called with some event. If that event is a trigger of some sort in which you can include a file name, then you can pull it from that variable. For example:
const getUploadURL = async function(event) {
  const randomID = parseInt(Math.random() * 10000000)
  const Key = `${event.fileNameFromTrigger}`

  // Get signed URL from S3
  const s3Params = {
    Bucket: process.env.UploadBucket,
    Key,
    Expires: URL_EXPIRATION_SECONDS,
    ...
}
If the file name includes the extension, then you don't need to append that as you were with the random name.
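To make the two-request flow described above concrete, here is a minimal client-side sketch; the endpoint URL, the filename query parameter, and the uploadURL field in the response are assumptions (the asker's Vue code below does the same thing in more detail):

// Minimal browser-side sketch of the two-request flow.
// Assumptions: the API Gateway endpoint and ?filename= parameter, and that the
// Lambda responds with JSON containing an uploadURL field.
async function uploadWithOriginalName(file) {
  // 1. Ask the Lambda (via API Gateway) for a presigned URL, passing the original filename.
  const apiEndpoint = 'https://example.execute-api.us-east-1.amazonaws.com/uploads'; // placeholder
  const res = await fetch(`${apiEndpoint}?filename=${encodeURIComponent(file.name)}`);
  const { uploadURL } = await res.json();

  // 2. PUT the file body directly to S3 using the presigned URL.
  await fetch(uploadURL, { method: 'PUT', body: file });

  // The object is now stored under its original filename; the plain URL has no query string.
  return uploadURL.split('?')[0];
}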
I modified the Lambda
changed this
const randomID = parseInt(Math.random() * 10000000)
const Key = `${randomID}.jpg`
to this
const Key = event.queryStringParameters.filename
And this is the frontend code with my endpoint redacted. Note the query ?filename= appended to the endpoint and how I used this.filename = file.name.
<script>
const MAX_IMAGE_SIZE = 1000000

/* ENTER YOUR ENDPOINT HERE */
const API_ENDPOINT = '{api-endpoint}/uploads?filename=' // e.g. https://ab1234ab123.execute-api.us-east-1.amazonaws.com/uploads

new Vue({
  el: "#app",
  data: {
    image: '',
    uploadURL: '',
    filename: ''
  },
  methods: {
    onFileChange (e) {
      let files = e.target.files || e.dataTransfer.files
      //let filename = files[0].name
      if (!files.length) return
      this.createImage(files[0])
    },
    createImage (file) {
      // var image = new Image()
      let reader = new FileReader()
      reader.onload = (e) => {
        //console.log(file.name)
        console.log('length: ', e.target.result.includes('data:image/jpeg'))
        if (!e.target.result.includes('data:image/jpeg')) {
          return alert('Wrong file type - JPG only.')
        }
        if (e.target.result.length > MAX_IMAGE_SIZE) {
          return alert('Image is too large.')
        }
        this.image = e.target.result
        this.filename = file.name
      }
      reader.readAsDataURL(file)
    },
    removeImage: function (e) {
      console.log('Remove clicked')
      this.image = ''
      this.filename = ''
    },
    uploadImage: async function (e) {
      console.log('Upload clicked')
      // Get the presigned URL
      const response = await axios({
        method: 'GET',
        url: API_ENDPOINT + this.filename
      })
      console.log('Response: ', response)
      console.log('Uploading: ', this.image)
      let binary = atob(this.image.split(',')[1])
      let array = []
      for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i))
      }
      let blobData = new Blob([new Uint8Array(array)], {type: 'image/jpeg'})
      console.log('Uploading to: ', response.uploadURL)
      const result = await fetch(response.uploadURL, {
        method: 'PUT',
        body: blobData
      })
      console.log('Result: ', result)
      // Final URL for the user doesn't need the query string params
      this.uploadURL = response.uploadURL.split('?')[0]
    }
  }
})
</script>

How to authorize with AWS signature 4 -> API Gateway -> Lambda

I've googled around a lot with no luck in finding the solution to my problem. I've read through the entire authentication process for AWS Signature 4 and followed their tutorial, as well as viewed other sources. I'm trying to have client-side authentication for a desktop application that makes requests to API Gateway.
When I use Postman it works properly, but I tried generating my own signature in Node.js to no avail; I keep getting 403 messages back from the call.
The function below returns the authenticated requestUrl, which is then run by axios.get(requestUrl). When I use the Postman-generated request it works perfectly fine, but once I use my generated request I have problems.
Am I missing something while authenticating? Here is what my code currently looks like:
function Authorize() {
  const host = "EXAMPLE.execute-api.us-east-1.amazonaws.com"
  const reg = 'us-east-1'
  const meth = 'GET'
  const serv = 'execute-api'
  const endpoint = '/development/putImage'

  // Keys
  let access = "EXAMPLE"
  let key = "KEY"

  // Get Date
  let t = new Date();
  let amzDate = t.toJSON().replace(/[-:]/g, "").replace(/\.[0-9]*/, "");
  let dateStamp = t.toJSON().replace(/-/g, "").replace(/T.*/, "");

  // ************* TASK 1: CREATE CANONICAL REQUEST *************
  // Create Canonical Request
  let canonical_uri = endpoint
  let canonical_headers = "host: " + host + "\n"
  let signedHeaders = 'host'
  let algorithm = 'AWS4-HMAC-SHA256'
  let credentialScope = dateStamp + "/" + reg + "/" + serv + "/" + "aws4_request"

  // Set query string
  let canonicalQueryString = ""
  canonicalQueryString += "X-Amz-Date=" + amzDate
  canonicalQueryString += "&X-Amz-Algorithm=" + algorithm;
  canonicalQueryString += "&X-Amz-Credential=" + encodeURIComponent(access + "/" + credentialScope)
  canonicalQueryString += "&X-Amz-SignedHeaders=" + signedHeaders

  // Empty payload for GET request
  var payloadHash = crypto.createHash('sha256').update('').digest('hex');

  // Set canonical request
  var canonicalRequest = meth + "\n" + canonical_uri + "\n" + canonicalQueryString + "\n" + canonical_headers + "\n" + signedHeaders + "\n" + payloadHash
  console.log(canonicalRequest)

  // ************* TASK 2: CREATE THE STRING TO SIGN *************
  let stringToSign = algorithm + '\n' + amzDate + '\n' + credentialScope + '\n' + crypto.createHash('sha256').update(canonicalRequest).digest('hex');

  // ************* TASK 3: CALCULATE THE SIGNATURE *************
  var signingKey = getSignatureKey(key, dateStamp, reg, serv)
  var signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex');

  // ************* TASK 4: ADD SIGNING INFORMATION TO THE REQUEST *************
  canonicalQueryString += '&X-Amz-Signature=' + signature
  let requestUrl = "https://" + host + endpoint + "?" + canonicalQueryString
  console.log(requestUrl)
  return requestUrl
}
The code below worked well for me. For more info, please visit https://docs.aws.amazon.com/opensearch-service/latest/developerguide/request-signing.html#request-signing-node
const { HttpRequest } = require("@aws-sdk/protocol-http");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { SignatureV4 } = require("@aws-sdk/signature-v4");
const { NodeHttpHandler } = require("@aws-sdk/node-http-handler");
const { Sha256 } = require("@aws-crypto/sha256-browser");

...

var request = new HttpRequest({
  body: JSON.stringify({"users": ["G0000000B", "G0000000A"]}),
  headers: {
    'Content-Type': 'application/json',
    'apiKey': 'XXXXXXXXXXXX',
    'apiSecret': 'XXXXXXXXXXXXXXXXXX',
    'host': 'service2.xxx.xxx.xx'
  },
  hostname: 'service2.xxx.xxx.xx',
  method: 'POST',
  path: 'API/user/list'
});

var signer = new SignatureV4({
  credentials: defaultProvider(),
  region: 'ap-southeast-1',
  service: 'execute-api',
  sha256: Sha256
});

const signedRequest = await signer.sign(request);

// Send the request
var client = new NodeHttpHandler();
var { response } = await client.handle(signedRequest);
console.log(response.statusCode + ' ' + response.body.statusMessage);

var responseBody = '';
await new Promise((resolve) => {
  response.body.on('data', (chunk) => {
    responseBody += chunk;
  });
  response.body.on('end', () => {
    console.log('Response body: ' + responseBody);
    resolve();
  });
}).catch((error) => {
  console.log('Error: ' + error);
});
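To adapt this to the GET call to API Gateway from the question, a sketch with the same packages might look like the following; the host, path, and region are the placeholders from the question, and this signs headers rather than building a presigned query string by hand:

// Sketch: signing a GET request to the API Gateway endpoint from the question,
// reusing the same @aws-sdk v3 packages imported above. Host/path/region are placeholders.
const { HttpRequest } = require("@aws-sdk/protocol-http");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { SignatureV4 } = require("@aws-sdk/signature-v4");
const { NodeHttpHandler } = require("@aws-sdk/node-http-handler");
const { Sha256 } = require("@aws-crypto/sha256-browser");

async function callApi() {
  const host = "EXAMPLE.execute-api.us-east-1.amazonaws.com";

  const request = new HttpRequest({
    method: "GET",
    hostname: host,
    path: "/development/putImage",
    headers: { host } // the host header must be included in the signed headers
  });

  const signer = new SignatureV4({
    credentials: defaultProvider(), // reads keys from the environment or shared config
    region: "us-east-1",
    service: "execute-api",
    sha256: Sha256
  });

  const signedRequest = await signer.sign(request);

  const client = new NodeHttpHandler();
  const { response } = await client.handle(signedRequest);
  console.log("Status:", response.statusCode);
}

callApi().catch(console.error);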

Is it possible to create a JSON Web Token using Azure Key Vault's Keys API?

I am currently using the jsonwebtoken package for generating signed JWTs with an RSA signing key. I was experimenting to see how I can move to Azure Key Vault. One way I could do this is to store the private and public keys as Vault secrets. But I noticed that there's also a Vault Keys API which generates a key in Vault and signs data which I provide it.
I've been trying with the @azure/keyvault-keys package and this is where I've got:
async signJsonWebToken(data: object, expiry: string): Promise<string> {
  const headerB64 = this.base64url(JSON.stringify(this.keyHeader), 'binary');
  const payloadB64 = this.base64url(this.getTokenData(data, expiry), 'utf8');
  const payload = `${headerB64}.${payloadB64}`;
  const key = await this.keyClient.getKey(this.KEY_NAME);
  const cryptClient = new CryptographyClient(key, new DefaultAzureCredential());
  const hash = crypto.createHash('sha256');
  const digest = hash.update(payload).digest();
  const signResult = await cryptClient.sign('RS256', digest);
  const signResultB64 = this.base64url(signResult.result.toString(), 'utf8');
  const result = `${payload}.${signResultB64}`;
  this.logger.log('Key: ' + key.key);
  this.logger.log('Sign result: ' + result);
  return result;
}

private base64url(data: string, encoding: string) {
  return SBuffer
    .from(data, encoding)
    .toString('base64')
    .replace(/=/g, '')
    .replace(/\+/g, '-')
    .replace(/\//g, '_');
}

private getTokenData(data: object, expiry: string): string {
  const now = Date.now();
  const expiresIn = new Date();
  if (expiry.endsWith('d')) {
    expiresIn.setDate(expiresIn.getDate() + parseInt(expiry));
  } else if (expiry.endsWith('h')) {
    expiresIn.setHours(expiresIn.getHours() + parseInt(expiry));
  } else if (expiry.endsWith('m')) {
    expiresIn.setMinutes(expiresIn.getMinutes() + parseInt(expiry));
  }
  const tokenData = Object.assign({
    iat: now,
    exp: expiresIn.getTime()
  }, data);
  return JSON.stringify(tokenData);
}
The generated signature does not look anywhere close to what I'd usually get with the jsonwebtoken package. My intention is to sign my tokens with Vault but verify them with jsonwebtoken.verify(). Is this even possible? What am I doing wrong in my code?
Since you use Azure Key Vault to sign the JWT, you can also use Azure Key Vault to verify the JWT.
For example
const crypto = require('crypto');
const util = require('util');
const base64 = require('base64url');

const key = await this.keyClient.getKey(this.KEY_NAME);
const cryptClient = new CryptographyClient(key, new DefaultAzureCredential());

const JWT = "";
const jwtHeader = JWT.split('.')[0];
const jwtPayload = JWT.split('.')[1];
const jwtSignature = JWT.split('.')[2];

const signature = base64.toBuffer(jwtSignature);
const data = util.format('%s.%s', jwtHeader, jwtPayload);
const hash = crypto.createHash('sha256');
const digest = hash.update(data).digest();
const verified = await cryptClient.verify("RS256", digest, signature);
Besides, if you want to use the jsonwebtoken package to verify the JWT, please refer to the following code:
const util = require('util');
const base64 = require('base64url');
const forge = require('node-forge');
const jwt = require('jsonwebtoken');
const crypto = require('crypto');
const { KeyClient, CryptographyClient } = require('@azure/keyvault-keys');
const { DefaultAzureCredential } = require('@azure/identity');

async function test() {
  // generate the JWT
  const headerObj = {
    alg: 'RS256',
    typ: 'JWT'
  };
  const payloadObj = {
    sub: '1234567890',
    name: 'John Doe'
  };
  const encodedHeader = base64(JSON.stringify(headerObj));
  const encodedPayload = base64(JSON.stringify(payloadObj));
  const data = util.format('%s.%s', encodedHeader, encodedPayload);
  const hash = crypto.createHash('sha256');
  const digest = hash.update(data).digest();

  const keyClient = new KeyClient("https://testsql.vault.azure.net/", new DefaultAzureCredential());
  const key = await keyClient.getKey("test");
  const cryptClient = new CryptographyClient(key.id, new DefaultAzureCredential());
  const signResult = await cryptClient.sign("RS256", digest);
  const jwts = util.format('%s.%s.%s', encodedHeader, encodedPayload, base64(signResult.result));
  console.log(jwts);

  // verify
  // convert the Azure Key Vault key to a public key
  var n = Buffer.from(key.key.n).toString("base64");
  var e = Buffer.from(key.key.e).toString("base64");
  var publicKey = forge.pki.setRsaPublicKey(
    base64urlToBigInteger(n),
    base64urlToBigInteger(e));

  // convert the public key to PEM format
  var pem = forge.pki.publicKeyToPem(publicKey);
  var d = jwt.verify(jwts, pem.toString());
  console.log(d);
}

function base64urlToBigInteger(str) {
  var bytes = forge.util.decode64(
    (str + '==='.slice((str.length + 3) % 4))
      .replace(/\-/g, '+')
      .replace(/_/g, '/'));
  return new forge.jsbn.BigInteger(forge.util.bytesToHex(bytes), 16);
}

How to schedule push notifications using the Azure SDK for Node

I know it is possible in .NET; I can see the reference here: https://learn.microsoft.com/en-us/azure/notification-hubs/notification-hubs-send-push-notifications-scheduled. But I want to know how to do that in Node. Can anyone guide me on this?
You can send a scheduled notification in Node using the REST API. Use the specification for sending a normal notification and replace /messages with /schedulednotifications. You will also need to add a header named ServiceBusNotification-ScheduleTime specifying the datetime.
For an example using the template schema:
var CryptoJS = require("crypto-js");
var axios = require("axios");

var getSelfSignedToken = function(targetUri, sharedKey, keyName, expiresInMins) {
  targetUri = encodeURIComponent(targetUri.toLowerCase()).toLowerCase();

  // Set expiration in seconds
  var expireOnDate = new Date();
  expireOnDate.setMinutes(expireOnDate.getMinutes() + expiresInMins);
  var expires = Date.UTC(expireOnDate.getUTCFullYear(), expireOnDate.getUTCMonth(),
    expireOnDate.getUTCDate(), expireOnDate.getUTCHours(), expireOnDate.getUTCMinutes(),
    expireOnDate.getUTCSeconds()) / 1000;

  var tosign = targetUri + '\n' + expires;

  // using CryptoJS
  var signature = CryptoJS.HmacSHA256(tosign, sharedKey);
  var base64signature = signature.toString(CryptoJS.enc.Base64);
  var base64UriEncoded = encodeURIComponent(base64signature);

  // construct authorization string
  var token = "SharedAccessSignature sr=" + targetUri + "&sig="
    + base64UriEncoded + "&se=" + expires + "&skn=" + keyName;
  // console.log("signature:" + token);
  return token;
};

var keyName = "<mykeyName>";
var sharedKey = "<myKey>";
var uri = "https://<mybus>.servicebus.windows.net/<myhub>";
var expiration = 10;

var token = getSelfSignedToken(uri, sharedKey, keyName, expiration);

const instance = axios.create({
  baseURL: uri,
  timeout: 100000,
  headers: {
    'Content-Type': 'application/octet-stream',
    'X-WNS-Type': 'wns/raw',
    'ServiceBusNotification-Format': 'template',
    'ServiceBusNotification-ScheduleTime': '2019-07-19T17:13',
    'authorization': token
  }
});

var payload = {
  "alert": "This is my test notification!"
};

instance.post('/schedulednotifications?api-version=2016-07', payload)
  .then(function (response) {
    console.log(response);
  }).catch(function (error) {
    // handle error
    console.log(error);
  });

Nodejs post request to get oauth2 token for a service account

I'm trying to get an oauth2 token for my service account using nodejs. I'm following the documentation found here:
https://developers.google.com/identity/protocols/OAuth2ServiceAccount#makingrequest
While there isn't a Node example, I've looked at the HTTP/REST documentation to get a rough idea of what it expects when making a request for a token. However, the response I'm getting back is:
Invalid JWT Signature.
As a general overview, when computing the signature you take the base64url-encoded values for the header and claim, hash them with SHA-256 and the private key from the Google developer console, and then base64url encode that value.
So my header is:
var header = {"alg":"RS256","typ":"JWT"}
var encodedHeader = base64url(new Buffer(JSON.stringify(header)).toString('utf8'));
The documentation above even lists what this value will be after base64url encoding it:
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9
And when I output my encodedHeader value:
eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9
Oh excellent, it matches. So there shouldn't be a base64url encoding issue for the other values. Next is the claim:
var claim ={"aud":"https://www.googleapis.com/oauth2/v4/token","scope":"https://www.googleapis.com/auth/gmail.send","iss":"*****censoredvalue*****gserviceaccount.com","exp":1505399833,"iat":1505396233}
It goes through the same process and is encoded. (Can't show it for security reasons.)
var encodedClaim = base64url(new Buffer(JSON.stringify(claim)).toString('utf8'));
For the moment I'm using third-party sites to compute the signature for testing purposes, but the input is encodedHeader.encodedClaim + private key (value being: -----BEGIN PRIVATE KEY....) and using SHA-256. I'll then take that output and base64url encode it.
I now have a JWT that is encodedHeader.encodedClaim.encodedSignature
var encoded_jwt = encodedHeader + '.' + encodedClaim + '.' + encodedSignature;
I'll make a request to the token endpoint with the following:
// Set the headers
var headers = {
  'HTTP-Version': 'HTTP/1.1',
  'Content-Type': 'application/x-www-form-urlencoded'
}

var bodyOptions = {
  'grant_type': "urn:ietf:params:oauth:grant-type:jwt-bearer",
  assertion: encoded_jwt
}

// Configure the request
var options = {
  url: 'https://www.googleapis.com/oauth2/v4/token',
  method: 'POST',
  headers: headers,
  form: bodyOptions,
  json: true
}

// Start the request
request(options, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    // Print out the response body
    console.log(body)
  }
  else {
    context.log(error);
    context.log(response);
  }
});
And this is where I get the response of an error saying that I have an invalid signature.
Any ideas as to why it's an invalid signature? I've also tried several signature-generating sites to be sure it wasn't that one specific site, but it's always the same error.
To compute the JWS, you can use the crypto module like the following:
const privateKey = fs.readFileSync('private.pem', 'utf-8');
const signer = crypto.createSign('sha256');
signer.update(data);
const signature = signer.sign(privateKey, 'base64');
where data is encodedHeader + '.' + encodedClaim
A complete example taken from a google-oauth-library test case:
"use strict";
const crypto = require('crypto');
const fs = require('fs');
const publicKey = fs.readFileSync('public.pem', 'utf-8');
const privateKey = fs.readFileSync('private.pem', 'utf-8');
const maxLifetimeSecs = 86400;
const now = new Date().getTime() / 1000;
const expiry = now + (maxLifetimeSecs / 2);
const idToken = '{' +
'"iss":"testissuer",' +
'"aud":"testaudience",' +
'"azp":"testauthorisedparty",' +
'"email_verified":"true",' +
'"id":"123456789",' +
'"sub":"123456789",' +
'"email":"test#test.com",' +
'"iat":' + now + ',' +
'"exp":' + expiry + '}';
const envelope = '{' +
'"kid":"keyid",' +
'"alg":"RS256"' +
'}';
let data = new Buffer(envelope).toString('base64') + '.' +
new Buffer(idToken).toString('base64');
const signer = crypto.createSign('sha256');
signer.update(data);
const signature = signer.sign(privateKey, 'base64');
data += '.' + signature;
console.log(data);
The following snippet using the 'crypto' module worked well for me to generate a JWS with the RSA-SHA256 signing algorithm and a private key, and finally to create the JWT required for an OAuth server.
import { createVerify, createSign } from 'crypto';

const privateKey = 'read from a file or key store';
const publicKey = 'read from a file or key store'; // only if signature verification is required

// header and claim - basis of the input that is going to be signed by the private key
const header = { alg: 'RS256', typ: 'JWT' };
const claim = {
  iss: 'MY_ISSUER',
  scope: '',
  aud: 'https://example.com/path/to/oAuth/token.json',
  iat: Date.now(),
  exp: Date.now() + 60 * 60 * 1000, // setting 1 hr
};

// create base64 of the header and claim
const base64Header = Buffer.from(JSON.stringify(header), 'utf8').toString('base64');
const base64Claim = Buffer.from(JSON.stringify(claim), 'utf8').toString('base64');

// create the input from the encoded header and claim;
// this is the input that is going to be signed by the secret key
const inputForSignature = `${base64Header}.${base64Claim}`;

// create the signing object using the 'RSA-SHA256' algo,
// and add the input to it
const sign = createSign('RSA-SHA256');
sign.update(inputForSignature, 'utf8');
sign.end();

// sign the input using your private key and get the signature in base64 format
const base64Signature = sign.sign(privateKey, 'base64');

// optionally, if you want to verify the signature using the public key
const verify = createVerify('RSA-SHA256');
verify.write(inputForSignature);
verify.end();
if (verify.verify(publicKey, base64Signature, 'base64')) {
  console.log('signature verified');
} else {
  console.log('signature could not be verified');
}

// form the JWT by concatenating the above encoded parameters separated by a dot (".")
const JWT = `${base64Header}.${base64Claim}.${base64Signature}`;
console.log(JWT); // the JWT you are looking for
