I have mocked up the response from the front end in Node.js as seen below.
The attestationObject parameter is what is returned once the YubiKey has signed the challenge; it has been converted to base64 for transport to the Node server.
What I'm getting is an ArrayBuffer { byteLength: 226 }, but I have no idea what to do with it.
I know I need to check the domain name that signed it, and I need to store something with the user's credentials so they can log in again.
I understand there are loads of options; I just want to get a bare-minimum passwordless register and login working.
const cbor = require("cbor");
const attestationObject = "o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YVjE4mQ5WmgO3yl24XjxRqkP9LjqRYP-GsIubALB-5K_CK5FXMrOUa3OAAI1vMYKZIsLJfHwVQMAQABcapsmHtrsLJtfZ7RDcRm0iDgMlc5-CuP2XcNOwDy0uU2mU44ENk-EqtthH7huq8AipYfY0EvmfPRqQI-zI5GlAQIDJiABIVggZplpmQSKsJvg78INyrQUgBo9dv0vaZL6Qp15rOd6wMQiWCAx-ZeQ6T_xTMlY9cG3EWY54wT9Hd6EX7P7Ak-9uwauCA"
const clientDataJSON = "eyJjaGFsbGVuZ2UiOiJlVGR1TjJGaGFIaHhhRFJzT0RsdU1qTnRhMjgiLCJvcmlnaW4iOiJodHRwczovL2UzMDI3MTU3Lm5ncm9rLmlvIiwidHlwZSI6IndlYmF1dGhuLmNyZWF0ZSJ9"
const id = "AFxqmyYe2uwsm19ntENxGbSIOAyVzn4K4_Zdw07APLS5TaZTjgQ2T4Sq22EfuG6rwCKlh9jQS-Z89GpAj7MjkQ"
const rawid = "AFxqmyYe2uwsm19ntENxGbSIOAyVzn4K4_Zdw07APLS5TaZTjgQ2T4Sq22EfuG6rwCKlh9jQS-Z89GpAj7MjkQ"
convertToBuffer(attestationObject)
  .then((buffer) => {
    return parseAttestationObject(buffer);
  })
  .then((json) => {
    console.log(json);
  })
  .catch((err) => {
    console.log(err);
  });

function convertToBuffer(base64) {
  return new Promise((resolve, reject) => {
    if (typeof base64 === "string") {
      // convert base64url to standard base64, decode it, and hand back an ArrayBuffer
      base64 = base64.replace(/-/g, "+").replace(/_/g, "/");
      base64 = Buffer.from(base64, "base64");
      base64 = new Uint8Array(base64);
      resolve(base64.buffer);
    } else {
      reject(new Error("Expected a base64 string"));
    }
  });
}
function parseAttestationObject(attestationObject) {
  return new Promise((resolve, reject) => {
    const authData = cbor.decodeAllSync(Buffer.from(attestationObject));
    const authnrDataArrayBuffer = authData[0].authData.buffer;
    console.log(authnrDataArrayBuffer);
    // What do I do with this authnrDataArrayBuffer? What needs saving to the database?
  });
}
It would help if you were a bit more precise about the exact issue, but in a nutshell:
You'd want to store the rawId. This is the identifier that you need to pass in the allowCredentials option in the authentication step, so you'll need it.
The attestationObject is a CBOR-encoded value. After some manipulation you should be able to extract a public key from it. You'll be able to use this key to verify the response from the authenticator in the authentication step.
I'm leaving out any specific implementation steps, but please have a look at https://github.com/fido-alliance/webauthn-demo, as this project implements WebAuthn for Node.js as well, so you should be able to extract all the relevant code from it.
// this is your attestationObject, which is a URL-safe base64 encoded string
var attestationObject = "o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YVjE4mQ5WmgO3yl24XjxRqkP9LjqRYP-GsIubALB-5K_CK5FXMrOUa3OAAI1vMYKZIsLJfHwVQMAQABcapsmHtrsLJtfZ7RDcRm0iDgMlc5-CuP2XcNOwDy0uU2mU44ENk-EqtthH7huq8AipYfY0EvmfPRqQI-zI5GlAQIDJiABIVggZplpmQSKsJvg78INyrQUgBo9dv0vaZL6Qp15rOd6wMQiWCAx-ZeQ6T_xTMlY9cG3EWY54wT9Hd6EX7P7Ak-9uwauCA";
// convert it to standard base64 (swap the URL-safe characters and restore padding)
attestationObject = attestationObject.replace(/\-/g, '+').replace(/_/g, '/') + '=='.substring(0, (3*attestationObject.length)%4);
// do a base64 decode into a Buffer
var attCbor = Buffer.from(attestationObject, 'base64');
// decode the CBOR object, using the cbor module
const cbor = require("cbor");
var attCborObj = cbor.decodeAllSync(attCbor)[0];
console.log(attCborObj);
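To answer the "what needs saving" part: the authData field on that decoded object is a byte array with a fixed layout (a 32-byte rpIdHash, a 1-byte flags field, a 4-byte signature counter, then the attested credential data: a 16-byte AAGUID, a 2-byte credential ID length, the credential ID itself, and a CBOR-encoded COSE public key). A minimal sketch of slicing it apart, continuing from attCborObj above, might look like this; treat it as an illustration rather than a full verification routine, since it skips checking rpIdHash against your domain and checking the flags:

// continue from attCborObj above
const authData = attCborObj.authData; // Buffer

const rpIdHash = authData.slice(0, 32);       // SHA-256 hash of the relying party ID (your domain)
const flags = authData[32];                   // user-present / attested-credential-data bits
const signCount = authData.readUInt32BE(33);  // signature counter

// attested credential data starts at byte 37: 16-byte AAGUID, 2-byte credential ID length, then ID and key
const credIdLength = authData.readUInt16BE(53);
const credentialId = authData.slice(55, 55 + credIdLength);
const publicKeyCose = cbor.decodeAllSync(authData.slice(55 + credIdLength))[0];

// store (at minimum) the credential ID, the COSE public key and the sign counter against the user;
// on login, look the key up by credential ID (allowCredentials) and verify the assertion signature with it
console.log(credentialId.toString('base64'), publicKeyCose, signCount);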
Related
I have an axios request that gets a webpage, and I need to pull a string from the response data between 2 reference strings.
Example: I want to get Stuff from the response data string.
"theID":1234,"theMap":"Stuff"}]};
axios
  .get(url)
  .then((response) => {
    const firstvariable = `theMap":"`;
    const secondvariable = `"}]}`;
    const data = response.data;
    const source = data.match(
      new RegExp(firstvariable + "(.*)" + secondvariable)
    );
    console.log(source); // should be Stuff
  })
  .catch((err) => {});
Regardless of what regex I try from searching on stackoverflow / googling, nothing works.
By default, Axios takes care of deserializing the JSON into a plain ol' object. So, all you need is something like this:
const response = await axios.get(url);
// since you didn't actually give us an example of your JSON,
// you'll need to figure out the correct path.
const stuff = response.data?.path?.to?.stuff;
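If the response turns out to be a raw string after all (for example scraped HTML rather than JSON), the regex approach from the question does work; the catch is that match() returns an array, so the captured text lives at index 1 rather than being the whole result. A small sketch:

const data = `"theID":1234,"theMap":"Stuff"}]};`;
const match = data.match(/theMap":"(.*?)"\}\]\}/);
const stuff = match ? match[1] : null; // the capture group, not the full match array
console.log(stuff); // "Stuff"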
How do I get the data parameter from the URL and decrypt it using the function below in a Cloudflare Worker? The URL looks like https://SOMEID.srv10.workers.dev/hash/, so I want to get hash and decrypt it using that function.
async function decryptData(encryptedData, password) {
  try {
    // split the IV off from the end (delimited using ::)
    const encryptedComponents = encryptedData.split("::");
    const data = hexStringToUint8Array(encryptedComponents[0]);
    const iv = hexStringToUint8Array(encryptedComponents[1]);
    const passwordKey = await getPasswordKey(password, ['decrypt']);
    const decryptedContent = await crypto.subtle.decrypt(
      {
        name: 'AES-CBC',
        iv: iv,
      },
      passwordKey,
      data,
    );
    return arraybufferToString(decryptedContent);
  } catch (e) {
    throw e;
  }
}
Assuming your hash is a query parameter, you can simply read it like so:
// convert the worker request url into a URL object
const url = new URL(request.url);
// read the encrypted data from the query param
const encryptedData = url.searchParams.get('hash');
// call your function
await decryptData(encryptedData, '<your-password-here>');
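Given that the example URL (https://SOMEID.srv10.workers.dev/hash/) puts the value in the path rather than in the query string, you may instead need to read it from the pathname. A sketch of a module-style Worker handler that does that follows; the handler shape and the position of the hash in the path are assumptions on my part:

export default {
  async fetch(request) {
    const url = new URL(request.url);
    // pathname is "/<hash>/", so take the first non-empty segment
    const encryptedData = url.pathname.split('/').filter(Boolean)[0];
    const plaintext = await decryptData(encryptedData, '<your-password-here>');
    return new Response(plaintext);
  },
};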
I'm currently trying to upload an image to Supabase's Storage; this looks fairly simple from the docs:
const { data, error } = await supabase.storage
  .from('avatars')
  .upload('public/avatar1.png', avatarFile)
Unfortunately Supabase expects a File type.
In my API I have a URL that points to the image I want to save; what's the best way for me to get the image at my URL as a File in Node.js?
I have tried this:
let response;
try {
  // fetch here is from the isomorphic-unfetch package so I can use it server-side
  response = await fetch('https://example.com/image.jpeg');
} catch (err) {
  throw new Error(err);
}

let data = await response?.blob();
let metadata = {
  type: 'image/jpeg',
};
let file = new File([data], 'test.jpg', metadata);
return file;
But I get a ReferenceError: File is not defined, which leads me to believe only the browser has access to creating a new File().
All I can find are answers about fs, which I think is Google getting confused. I don't think I can use fs to return a File type.
Any ideas?
So what you can do is send an HTTP request to the file:
const fs = require('fs');
const http = require('http'); // maybe https?

const fileStream = fs.createWriteStream('image.png');
const request = http.get('URL_HERE', function(response) {
  response.pipe(fileStream);
});
The above code fetches and writes the file from the URL to your server, and then you need to read it and send it to the upload process.
const finalFile = fs.readFileSync( 'image.png', optionsObject );
And now you have your file contents; do your upload, and don't forget to remove the temporary file if it's not needed anymore.
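Putting the two steps together, here is a rough sketch of the whole flow: download to a temporary file, read it back as a Buffer, and hand that Buffer to Supabase. supabase-js accepts a Buffer body; the contentType option name is how I recall it, so double-check it against the Storage docs:

const fs = require('fs');
const https = require('https');

// download the image to a temporary file and wait for the write to finish
await new Promise((resolve, reject) => {
  const fileStream = fs.createWriteStream('image.jpeg');
  https.get('https://example.com/image.jpeg', (response) => {
    response.pipe(fileStream);
    fileStream.on('finish', resolve);
  }).on('error', reject);
});

// read it back and upload the Buffer instead of a File
const buffer = fs.readFileSync('image.jpeg');
const { data, error } = await supabase.storage
  .from('avatars')
  .upload('public/avatar1.png', buffer, { contentType: 'image/jpeg' });

// clean up the temporary file once the upload is done
fs.unlinkSync('image.jpeg');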
You can do something like this:
const fspromise = require('fs').promises;

let response;
try {
  // fetch here is from the isomorphic-unfetch package so I can use it server-side
  response = await fetch('https://example.com/image.jpeg');
} catch (err) {
  throw new Error(err);
}

let data = await response?.blob();
let metadata = {
  type: 'image/jpeg',
};
const file = blob2file(data);

function blob2file(blobData) {
  const fd = new FormData();
  fd.set('a', blobData);
  return fd.get('a');
}

const { data: uploadData, error } = await supabase.storage
  .from('avatars')
  .upload('public/avatar1.png', file)
After a lot of research, this isn't actually possible. I've tried a lot of npm packages that advertise being able to convert blobs to Files, but none of them seemed to work.
The only actual solution is to download the file as the other answers have suggested, but in my situation it just wasn't doable.
I am using the @aws-crypto/client-node npm module to encrypt and decrypt a file using a KMS key,
but when I run the following code
I get the error 'Missing credentials in config'.
const {
  KmsKeyringNode,
  encrypt,
  decrypt
} = require("@aws-crypto/client-node");

const encryptData = async (plainText, context) => {
  try {
    const { result } = await encrypt(keyring, plainText, {
      encryptionContext: context
    });
    return result;
  } catch (e) {
    console.log(e);
  }
};

encryptData('hello world', {
  stage: "test",
  purpose: "poc",
  origin: "us-east-1"
})
I can see a couple of issues with this code:
You are trying to import the encrypt and decrypt functions directly from the module. This is not how aws-crypto works. You need to use buildClient to create an instance which holds these methods.
You are using a keyring variable, but keyring is never declared. You need to create a keyring using the KmsKeyringNode constructor.
In order to properly use AWS KMS to encrypt and decrypt data, take a look at the example below. (Note that, for simplicity, this example does not use an encryption context, nor the additional key ARNs you can add. It also assumes that you are using the same key for both encryption and decryption.)
const {
  AMAZON_ENCRYPTION_KEY_ARN
} = process.env;

const awsCrypto = require('@aws-crypto/client-node');

const awsEncryptionClient = awsCrypto.buildClient(
  awsCrypto.CommitmentPolicy.REQUIRE_ENCRYPT_REQUIRE_DECRYPT
);

const keyring = new awsCrypto.KmsKeyringNode({
  generatorKeyId: AMAZON_ENCRYPTION_KEY_ARN
});

const encrypt = async (data) => {
  try {
    const { result } = await awsEncryptionClient.encrypt(keyring, data);
    return result.toString('base64');
  } catch (err) {
    console.log('Encryption error: ', err);
    throw err;
  }
};

const decrypt = async (encryptedData) => {
  try {
    const encryptedBuffer = Buffer.from(encryptedData, 'base64');
    const { plaintext } = await awsEncryptionClient.decrypt(keyring, encryptedBuffer);
    return plaintext.toString('utf8');
  } catch (err) {
    console.log('Decryption error: ', err);
    throw err;
  }
};

module.exports = {
  encrypt,
  decrypt
};
You can create a file using the code above and invoke the functions by importing that file somewhere else. You will need to supply the encryption/decryption key ARN. Besides encryption and decryption, encoding to and decoding from base64 is included, so the final result is suitable for storage (in a database, for example).
For additional code examples take a look here.
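For completeness, a small usage sketch of the module above (the crypto-helper.js filename is just an assumption for this example):

// assumes the code above was saved as crypto-helper.js
const { encrypt, decrypt } = require('./crypto-helper');

(async () => {
  const ciphertext = await encrypt('hello world'); // base64 string, safe to store
  const plaintext = await decrypt(ciphertext);     // "hello world"
  console.log({ ciphertext, plaintext });
})();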
I'm searching for a way to get the base64 string representation of a PDFKit document. I can't find the right way to do it...
Something like this would be extremely convenient.
var doc = new PDFDocument();
doc.addPage();
doc.outputBase64(function (err, pdfAsText) {
  console.log('Base64 PDF representation', pdfAsText);
});
I already tried with the blob-stream lib, but it doesn't work on a Node server (it says that Blob doesn't exist).
Thanks for your help!
I was in a similar predicament, wanting to generate PDFs on the fly without having temporary files lying around. My context is a Node.js API layer (using Express) which is interacted with via a React frontend.
Ironically, a similar discussion for Meteor helped me get where I needed to be. Based on that, my solution resembles:
const PDFDocument = require('pdfkit');
const { Base64Encode } = require('base64-stream');
// ...
var doc = new PDFDocument();
// write to PDF
var finalString = ''; // contains the base64 string
var stream = doc.pipe(new Base64Encode());
doc.end(); // will trigger the stream to end
stream.on('data', function(chunk) {
  finalString += chunk;
});

stream.on('end', function() {
  // the stream is at its end, so push the resulting base64 string to the response
  res.json(finalString);
});
A synchronous option, not (yet) present in the documentation:
const doc = new PDFDocument();
doc.text("Sample text", 100, 100);
doc.end();
const data = doc.read();
console.log(data.toString("base64"));
I just made a module for this that you could probably use: js-base64-file.
const Base64File = require('js-base64-file');
const b64PDF = new Base64File();

const file = 'yourPDF.pdf';
const path = `${__dirname}/path/to/pdf/`;

const doc = new PDFDocument();
doc.addPage();
// save your PDF to disk using the filename and path above

// this will load the saved PDF and convert it to a base64 string
const data = b64PDF.loadSync(path, file);
console.log('Base64 PDF representation', data);

// you could also save a copy as base64 if you wanted, like so:
b64PDF.save(data, path, `copy-b64-${file}`);
It's a new module so my documentation isn't complete yet, but there is also an async method.
// this will load and convert asynchronously if needed
b64PDF.load(
  path,
  file,
  function(err, base64) {
    if (err) {
      // handle error here
      process.exit(1);
    }
    console.log('ASYNC: you could send this PDF via ws or http to the browser now\n');
    // or, as above, save it here
    b64PDF.save(base64, path, `copy-async-${file}`);
  }
);
I suppose I could add a convert-from-memory method too. If this doesn't suit your needs, you could submit a request on the js-base64-file repo.
Following Grant's answer, here is an alternative that returns a promise instead of writing to the Node response (to ease calling it outside of a router):
const PDFDocument = require('pdfkit');
const { Base64Encode } = require('base64-stream');

const toBase64 = doc => {
  return new Promise((resolve, reject) => {
    try {
      const stream = doc.pipe(new Base64Encode());
      let base64Value = '';
      stream.on('data', chunk => {
        base64Value += chunk;
      });
      stream.on('end', () => {
        resolve(base64Value);
      });
    } catch (e) {
      reject(e);
    }
  });
};
The caller should call doc.end() before or after invoking this async method.
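A small usage sketch of that helper (the document contents are just an example):

const doc = new PDFDocument();
doc.text('Sample text', 100, 100);

const base64Promise = toBase64(doc); // attach the Base64Encode pipe first
doc.end();                           // finishing the document ends the stream

base64Promise.then((pdfAsBase64) => {
  console.log('Base64 PDF representation', pdfAsBase64);
});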