Using a NodeJS Express server to handle Google OAuth via the OpenID Connect protocol on the server side, I am trying to have the callback run this checklist after Google redirects the user back to the API with the auth code in the query string:
1. Only allow emails from the school's domain to pass (this is a school project).
2. Exchange the auth code for a token object containing the access token, refresh token, etc.
3. Store the access token in a cookie.
4. Store the whole token object in the Redis cache as a JSON object... THIS IS WHERE I AM CAUGHT. (Using the RedisJSON (ReJSON) module, because user data from the database can later be added to the in-memory object for CACHING USER DATA.)
Next steps in the logic include checking whether the user exists in MongoDB, creating the user if not, redirecting back to the browser carrying the cookie, and using the access token to check against the Redis cache to maintain a persistent session... this will eventually be secured via hashing, httpOnly, HTTPS, etc.
Hopefully that lets you in on the picture of my logic, but this is seemingly a REDIS OR REDIS-RELATED MODULE ISSUE.
To use the RedisJSON module, I am running a redis-rejson Docker image exposing port 6379, and the Express server (running on port 4000) is the client of the Redis cache server.
The Node module redis-rejson maps RedisJSON commands to JavaScript-friendly names, and the Node module redis is a Redis client library for Node.
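One thing worth sanity-checking in this setup is whether the RedisJSON module is actually loaded on the Redis server the app connects to. A minimal sketch using the same node_redis client (MODULE LIST is a built-in Redis command; the logging here is only illustrative):
// Quick check: list the modules loaded on the Redis server (RedisJSON should appear as "ReJSON").
const redis = require("redis");
const checkClient = redis.createClient(process.env.REDIS_PORT || 6379);
checkClient.send_command("MODULE", ["LIST"], (err, modules) => {
  if (err) throw err;
  console.log("Loaded modules:", JSON.stringify(modules));
  checkClient.quit();
});
Here is the full server code: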
//This is a minimalist web-framework for NodeJS
var express = require("express");
//axios - handles HTTP requests
const axios = require("axios");
//redis - use for caching
//rejson - store JSON via redis
const redis = require("redis"),
rejson = require("redis-rejson");
rejson(redis);
//set ports
const PORT = process.env.PORT || 4000;
const REDIS_PORT = process.env.REDIS_PORT || 6379;
const client = redis.createClient(REDIS_PORT);
// Init Express app
var app = express();
//callback route from google
app.get("/signin/callback", (req, res, next) => {
//declare vars from query string api return for later use
//console.log(req.query);
let hd = req.query.hd;
let authCode = req.query.code;
//Only allow GUHSD email domains
if(hd !== 'guhsd.net') {
console.log('you shall not pass')
// The return is for stopping execution of controller
return res.redirect(301, 'http://localhost:3000/?error=invalid_domain');
}
//GET client_id and client_secret FROM JSON
//Make POST REQ to exchange AuthCode for JSON token object
axios.post('https://oauth2.googleapis.com/token', {
client_id: '<taken out as to not expose>',
client_secret: '<taken out as to not expose>',
code: authCode,
grant_type: 'authorization_code',
redirect_uri: 'http://localhost:4000/signin/callback'
})
.then((response) => {
console.log('Your token should be here');
// Logic after exchange
//1. Start Sessions - Client HTTP only Cookie & Server Redis Cache
//2. Check if ACC exists - if so redirect client :)
//3. if ACC DOES NOT exist, create new acc!! then redirect client :)
async function passOver() {
console.log(response.data);
// example:
// {
// access_token: 'ya29.a0AfH6SMBSm3D99XAhmlxHtYOVVm7gZT9h8Bd8I9RgpYPhK0qXQwGT4fuMWhDsqDpcTfQuIg6bvY-tFUISX4Wm2Is-jprW2sstjmD4hjc2cQh5uIYODg1Re6v80FKENDdEAinTm9kid0QaKiaRxJQHt1deap-Ik1mcy2s',
// expires_in: 3599,
// refresh_token: '1//06qroj95zjugXCgYIARAAGAYSNwF-L9IrzWEi6q0wDpeUVKmnPZYZjZpsMS-KVT4fY9NzHgZCUxgY8fg2J_Nil2vJpMz52y-2pyY',
// scope: 'openid https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email',
// token_type: 'Bearer',
// id_token: 'eyJhbGciOiJSUzI1NiIsImtpZCI6Ijk2MGE3ZThlODM0MWVkNzUyZjEyYjE4NmZhMTI5NzMxZmUwYjA0YzAiLCJ0eXAiOiJKV1QifQ.eyJpc3MiOiJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20iLCJhenAiOiIyNDM1NjY4NDcxMDItdTRkazg1Y21qcjEybWgya25ycHYzaW5zMnRjcnBzOHUuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJhdWQiOiIyNDM1NjY4NDcxMDItdTRkazg1Y21qcjEybWgya25ycHYzaW5zMnRjcnBzOHUuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJzdWIiOiIxMTA1MzUzMTYxMjcxODg4Mjg5NjYiLCJoZCI6Imd1aHNkLm5ldCIsImVtYWlsIjoiMzMwMjk0QGd1aHNkLm5ldCIsImVtYWlsX3ZlcmlmaWVkIjp0cnVlLCJhdF9oYXNoIjoidEhoVEl6eWp3MHcyRFpVaVpUMmxCQSIsIm5hbWUiOiJERVZJTiBQUk9WRU5DRSIsInBpY3R1cmUiOiJodHRwczovL2xoMy5nb29nbGV1c2VyY29udGVudC5jb20vLVhTaV9oU2JsVWo0L0FBQUFBQUFBQUFJL0FBQUFBQUFBQUFBL0FNWnV1Y25HXzBwS3N0eVo3cEI0Wk0yZlpMQkNIUEU3UkEvczk2LWMvcGhvdG8uanBnIiwiZ2l2ZW5fbmFtZSI6IkRFVklOIiwiZmFtaWx5X25hbWUiOiJQUk9WRU5DRSIsImxvY2FsZSI6ImVuIiwiaWF0IjoxNTkwNjEwODE4LCJleHAiOjE1OTA2MTQ0MTh9.Nt2eGzCsKrUmyztqYWiZ16-1S7OCRcUFlSFvhVAy9HusYfLqp0nz3diUkuP-D_27BCtBsZCQ0JC1evPISwLX9H1hJs_GKYSD12s-ovJ8S0AzghY0M-AOuYxhGvKautusmXYfHvhfcPj7IKhPo_IXBl3x-ryOtRRrpJR6c30QPl1JUae74sAcLk8H1stLgqptrrRTgJWYdTXxJHSrZcR8RsLw2aY4GwuPGEX-AD6h51IZlNTl_6qNpaIt_7mFSUV-iF1PECAotfhKHdAryZCFRBq4XE6uJnYq3WnOVAMAEYwqT543pxarXOmLuAwhDqqewyuCXsjlbyhBys-4iEhYAg'
// }
//expires_in is in seconds... cookie maxAge takes milliseconds, so convert.
//send to client, store in cookie (browser's session starts)
res.cookie(
"tokenResponse", response.data.access_token, {
maxAge: response.data.expires_in * 1000
}
)
var keyObject = JSON.stringify(response.data);
//set redis key to user
var user = 'user';
//store response.data in redis cache(server's session starts)
client.json_set(user, '.', keyObject, function (err) {
if (err) { throw err; }
console.log('Set JSON at key ' + user + '.');
client.json_get(user, '.access_token', function (err, value) {
if (err) { throw err; }
console.log('value of .', value); //outputs JSON
client.quit();
});
});
res.json(keyObject);
//redirect from server to frontend
// await res.redirect('http://localhost:3000');
}
//call passover
passOver();
})
//if err, log & redirect user to client
.catch((error) => {
console.log(error.request);
console.log(error.response.data);
return res.redirect(301, 'http://localhost:3000/?error=google_failed_exchange');
});
});
//start the express server
app.listen(PORT, () =>
console.log(`App started on port ${PORT}`)
);
module.exports = app;
The error I'm getting is:
if (err) { throw err; }
^
ReplyError: ERR unknown command 'json.set'
at parseError (E:\docker-node-react-nginx\backend\node_modules\redis-parser\lib\parser.js:179:12)
at parseType (E:\docker-node-react-nginx\backend\node_modules\redis-parser\lib\parser.js:302:14) {
command: 'JSON.SET',
args: [
'user',
'.',
'{"access_token":"ya29.a0AfH6SMCSN3_0fXFexXYUqrkfhlJ5FmdkO-eqKeiXeSnRzlwD5aBDpBF7y-pXMDBY1YFqXLf-JU0mc5FXVo8nZER-6wB-hAm7qW_0w4Z3TcfoQfOT7ZXg8ZqK2DPyW8TnsZELWe9eDBYvFqM0lTWyWe3Z9ZqUXgv6Kz4","expires_in":3599,"refresh_token":"1//06FRrH2GkKtP2CgYIARAAGAYSNwF-L9IrnL1ZpMhxaiYWYuSnI7p6DG0uFIO3Vu2qt40Scio5SAlGT0mvBZ8hWvaPcJEEnjaunDw","scope":"https://www.googleapis.com/auth/userinfo.email openid https://www.googleapis.com/auth/userinfo.profile","token_type":"Bearer","id_token":"eyJhbGciOiJSUzI1NiIsImtpZCI6ImZiOGNhNWI3ZDhkOWE1YzZjNjc4ODA3MWU4NjZjNmM0MGYzZmMxZjkiLCJ0eXAiOiJKV1QifQ.eyJpc3MiOiJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20iLCJhenAiOiIyNDM1NjY4NDcxMDItdTRkazg1Y21qcjEybWgya25ycHYzaW5zMnRjcnBzOHUuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJhdWQiOiIyNDM1NjY4NDcxMDItdTRkazg1Y21qcjEybWgya25ycHYzaW5zMnRjcnBzOHUuYXBwcy5nb29nbGV1c2VyY29udGVudC5jb20iLCJzdWIiOiIxMTA1MzUzMTYxMjcxODg4Mjg5NjYiLCJoZCI6Imd1aHNkLm5ldCIsImVtYWlsIjoiMzMwMjk0QGd1aHNkLm5ldCIsImVtYWlsX3ZlcmlmaWVkIjp0cnVlLCJhdF9oYXNoIjoiV2kwRVdlSl9fQmVsaGV1RF9SVEl6USIsIm5hbWUiOiJERVZJTiBQUk9WRU5DRSIsInBpY3R1cmUiOiJodHRwczovL2xoMy5nb29nbGV1c2VyY29udGVudC5jb20vLVhTaV9oU2JsVWo0L0FBQUFBQUFBQUFJL0FBQUFBQUFBQUFBL0FNWnV1Y25HXzBwS3N0eVo3cEI0Wk0yZlpMQkNIUEU3UkEvczk2LWMvcGhvdG8uanBnIiwiZ2l2ZW5fbmFtZSI6IkRFVklOIiwiZmFtaWx5X25hbWUiOiJQUk9WRU5DRSIsImxvY2FsZSI6ImVuIiwiaWF0IjoxNTkwNjI0NDIyLCJleHAiOjE1OTA2MjgwMjJ9.IyF5uB2ldLBQLu3CVMDpXf8szEK_BbR8SPrtdpJR_Y3bHklJ8e3JYGQT9AWjkcSy0I4DNUhkXiFk25HvZ06u2ekGd_adSknUVNwZe_N1IQTlMF1m-oqWbaRtnr4oxerQg_YunZDD4z_Lh5ecSDVz4X8H39uO7jrAvY1CdnZfZ4D2Je8aV1Zns5JahKhOTopPcy5sE1dSBNPqqGvUiY9h0MQHne9byUz9jMvog3YI-8-uexjC_JWsbzMFjE65ze5_cUpApYB5tUrNTjqvhiYgcimIPOXoto_VIHHEEoho5uHOkUQw_UVXleUa9vI77W1j7U7HnH-h_3C5ylx7UEDm6Q"}'
],
code: 'ERR'
}
[nodemon] app crashed - waiting for file changes before starting...
import Redis from 'ioredis'
const connOpts = {
port: process.env.REDIS_PORT,
host: process.env.REDIS_HOST
}
const client = new Redis(connOpts)
const sets = (id, keyPaths, input) => {
return client.send_command('JSON.SET', id, keyPaths, JSON.stringify(input)).then((res) => {
return res
}).catch((e) => {
console.error('redis insertion error', e)
})
}
sets("134", ".", {"access_token":"ya29"}
I did not know where to go next, so I'm posting my issue here, as I've already seen some related questions on this matter. Unfortunately the solutions provided did not work in my case, and I do not know what else to try.
So some background: I have a NodeJS/ExpressJS/passport-saml application that authenticates against an ADFS system. The SSO part of the matter works perfectly, but I can't seem to get the SLO part working.
What happens is that when I initiate either a SP-initiated or IdP-initiated logout it hangs on the first SP. This first SP is being logged out correctly, but it is then redirected to the login page of the first SP and keeps waiting for the credentials to be entered, effectively halting the redirect chain that has to happen.
What I've tried so far is a lot, including using both POST and HTTP-Redirect bindings on my SLO ADFS endpoint/NodeJS server, modifying the routes etc.
Current implementation is as follows:
SLO endpoint configuration (equal for each SP; the screenshot, with part blacked out, is omitted here):
The passport-saml configuration is as follows on the SP server:
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~//
// NodeJS native
const path = require('path');
const fs = require('fs');
// NodeJS packages
const SamlStrategy = require('passport-saml').Strategy;
const { Database } = require('../../Database');
// Custom imports
const { ApplicationConfiguration } = require('../../ApplicationConfiguration');
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ CONSTANTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~//
let strategy = {};
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INIT ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~//
/**
* Initialise the passport saml strategy with the necessary configuration parameters.
*/
const initStrategy = () => {
// Get additional required configuration
const config = ApplicationConfiguration.getProperties([
['CGS_HOST'],
['AUTH_PORT'],
['SSO', 'host'],
['SSO', 'identifier'],
['SSO', 'cert'],
['SSO', 'algorithm'],
['HTTPS_CERT_PRIVATE_PATH'],
]);
// Define the SAML strategy based on configuration
strategy = new SamlStrategy(
{
// URL that should be configured inside the AD FS as return URL for authentication requests
callbackUrl: `https://${<sp_host_name>}:${<sp_port_value>}/sso/callback`,
// URL on which the AD FS should be reached
entryPoint: <idp_host_name>,
// Identifier for the CIR-COO application in the AD FS
issuer: <sp_identifier_in_idp>,
identifierFormat: null,
// CIR-COO private certificate
privateCert: fs.readFileSync(<sp_server_private_cert_path>, 'utf8'),
// Identity Provider's public key
cert: fs.readFileSync(<idp_server_public_cert_path>, 'utf8'),
authnContext: ['urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport'],
// AD FS signature hash algorithm with which the response is encrypted
signatureAlgorithm: <idp_signature_algorithm>,
// Single Log Out URL AD FS
logoutUrl: <idp_host_name>,
// Single Log Out callback URL
logoutCallbackUrl: `https://${<sp_host_name>}:${<sp_port_value>}/slo/callback`,
// skew that is acceptable between client and server when checking validity timestamps
acceptedClockSkewMs: -1,
},
async (profile, done) => {
// Map ADFS groups to Group without ADFS\\ characters
const roles = profile.Roles.map(role => role.replace('ADFS\\', ''));
// Get id's from the roles
const queryResult = await Database.executeQuery('auth-groups', 'select_group_ids_by_name', [roles]);
// Map Query result to Array for example: [1,2]
const groupIds = queryResult.map(group => group.id);
done(null,
{
sessionIndex: profile.sessionIndex,
nameID: profile.nameID,
nameIDFormat: profile.nameIDFormat,
id: profile.DistinguishedName,
username: profile.DistinguishedName,
displayName: profile.DisplayName,
groups: profile.Roles,
mail: profile.Emailaddress,
groupIds,
});
},
);
// Return the passport strategy
return strategy;
};
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PASSPORT CONFIG ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~//
/**
* Initialise the passport instance and add the saml passport strategy to it for authentication
* @param {Object} passport - Passport object
*/
const initPassport = (passport) => {
// (De)serialising
passport.serializeUser((user, done) => {
done(null, user);
});
passport.deserializeUser((user, done) => {
done(null, user);
});
// Initialise the strategy
const passportStrategy = initStrategy();
// Add the strategy to passport
passport.use('saml', passportStrategy);
};
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ HELPERS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~//
/**
* Get the metadata from the Service Provider (this server).
* @param {String} publicPath - Path to public certificate
* @return {Promise<any>} - Metadata object for this application
*/
const getMetaData = publicPath => new Promise((resolve) => {
const metaData = strategy.generateServiceProviderMetadata({}, fs.readFileSync(path.join(publicPath), 'utf8'));
resolve(metaData);
});
/**
* Construct a Single Logout Request and send it to the IdP.
* @param {Object} req - Default request object
* @param {Object} res - Default response object
*/
const logout = (req, res) => {
// Construct SLO request for IdP
strategy.logout(req, (err, url) => {
req.logOut();
// Redirect to SLO callback URL and send logout request.
return res.redirect(url);
});
};
const getStrategy = () => strategy;
module.exports = {
initPassport,
getStrategy,
getMetaData,
logout,
};
And the relevant routes and functions are as follows:
const logOutLocalSession = sid => new Promise(((resolve, reject) => {
log.info(`Received request to destroy session with sid ${sid}.`);
// Destroy local session
store.destroy(sid, (err) => {
if (err) {
log.error(`Error occurred while logging out local session with SID ${sid}: ${err}`);
reject('Unknown error occurred during local logout.');
}
log.info(`Successfully logged out user locally with SID ${sid}.`);
resolve();
});
}));
const logOutAllSessions = async (req, res) => {
// Extract username to get all sessions
const { username } = req.session.passport.user;
log.info(`Received request to log user ${username} out of all sessions.`);
const sessionIdsRes = await Database.executeQuery('sessions', 'select_sids_by_user_id', [username]);
// Loop over all sessions and destroy them
const destroyPromises = [];
sessionIdsRes.forEach((sessionIdRes) => {
destroyPromises.push(logOutLocalSession(sessionIdRes.sid));
});
await Promise.all(destroyPromises);
// Remove local session from request
req.session = null;
log.info(`User ${username} logged out successfully from all known sessions.`);
};
const logOutIdp = (req, res) => {
const { username } = req.session.passport.user;
log.info(`Received request to log out user ${username} on Identity Provider.`);
const strategy = passportImpl.getStrategy();
// Create logout request for IdP
strategy.logout(req, async (err, url) => {
// Destroy local sessions
logOutAllSessions(req, res);
// Redirect to SLO callback URL and send logout request.
return res.redirect(url);
});
};
// SP initiated logout sequence
app.get('/auth/logout', (req, res) => {
// If user not logged in, redirect to login
if (!req.user) {
return res.redirect('/saml/login');
}
const { username } = req.session.passport.user;
if (username === 'Administrator' || username === 'Support user') {
logOutLocalSession(req.session.id);
} else {
logOutIdp(req, res);
}
});
// IdP initiated logout sequence or from other SP
app.post('/slo/callback', logOutAllSessions);
If any information is missing I will be able to provide it. I do hope I can get some leads on what to try next! Thanks in advance!
In terms of the ADFS configuration:
"Trusted URL" should be the ADFS logout endpoint - you can see that in the metadata - so that ADFS can clear cookies.
"Response URL" should be the endpoint in your app. that expects the SLO response so that it can clear client cookies.
I have a web server written in node/express which uses passport to authenticate users with JSON Web Tokens (JWT).
For regular HTTP methods this is what I use:
app.get('/api/stuff', isLoggedIn(), (req, res) => {
//get stuff
res.send( whatever );
});
Where isLoggedIn is this function:
function isLoggedIn() {
return passport.authenticate('local-jwt', { session: false });
}
The authentication itself is handled by the 'local-jwt' configuration in passport using a JWTStrategy object and it works as expected.
Now I need to add web sockets to this app. I'm using the ws library. Here is what I have so far:
const wss = new WebSocket.Server({
port: 8080,
verifyClient: async (info, done) => {
// ???
done( result );
}
});
wss.on('connection', ws => {
// web socket events
});
How do I use the passport authentication to only allow clients with the correct token to connect to the web socket server?
You can sign the token with the user's id or anything you are using to differentiate users:
var today = new Date();
var exp = new Date(today);
exp.setDate(today.getDate() + 60);
jwt.sign({
id: user._id,
exp: parseInt(exp.getTime() / 1000),
}, secret);
Then pass the token to the frontend and use it to initialize your WebSocket connection with the token appended to the URL, then take and decode it on the server like this:
const wss = new WebSocket.Server({
verifyClient: async (info, done) => {
console.log(info.req.url);
console.log('------verify client------');
const token = info.req.url.split('/')[1];
var decoded = jwt.verify(token, secret);
info.req.user = await User.findById(decoded.id).exec();
// info.req.user is either null or the user, so reject the connection when it is null
done(!!info.req.user);
},
server
});
This is one of many ways. There is a way with sessions, but I see you don't want to use them. Also, I don't know if you use Mongo, but it's the same for any database; you just have to change some code.
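For completeness, a minimal client-side sketch under the same assumptions (the server above reads the token from info.req.url.split('/')[1], so the signed JWT is appended as the first path segment; host and port are placeholders):
// Browser-side sketch: open the socket with the JWT appended to the URL.
const token = localStorage.getItem('jwt'); // however your app stores the signed token (assumption)
const socket = new WebSocket(`ws://localhost:8080/${token}`);
socket.addEventListener('open', () => {
  console.log('authenticated web socket connection established');
});
socket.addEventListener('message', (event) => {
  console.log('message from server:', event.data);
});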
Here is the code from the googleapis Node.js client that I have an issue with.
First things first: I would like to get the list of contacts for one user using a Node.js application.
Set up an OAuth2Client
So I set up an OAuth2Client with this code:
const {
client_id: CLIENT_ID,
client_secret: CLIENT_SECRET
} = require("./keys.json");
const REDIRECT_URL = "http://localhost:3000/oauth2callback";
const oAuth2Client = new OAuth2Client(CLIENT_ID, CLIENT_SECRET, REDIRECT_URL);
Then, using a temporary server, I ask for the authorization code using the user's credentials:
function getGoogleCode() {
return new Promise((resolve, reject) => {
// Open an http server to accept the oauth callback. In this simple example, the
// only request to our webserver is to /oauth2callback?code=<code>
const server = http
.createServer(async (req, res) => {
if (req.url.indexOf("/oauth2callback") > -1) {
// acquire the code from the querystring, and close the web server.
const { code } = querystring.parse(url.parse(req.url).query);
res.end(
`Authentication successful! Please return to the console. [code: ${code}]`
);
server.close();
resolve(code);
}
reject(new Error("oops", req, res));
})
.listen(3000, () => {
// open the browser to the authorize url to start the workflow
// Generate the url that will be used for the consent dialog.
const authorizeUrl = oAuth2Client.generateAuthUrl({
access_type: "offline",
scope: ["https://www.googleapis.com/auth/contacts.readonly"]
});
opn(authorizeUrl);
});
});
}
Then I finish setting up my client:
const code = await getGoogleCode();
const { tokens } = await oAuth2Client.getToken(code);
oAuth2Client.setCredentials(tokens);
When everything's fine
I managed to get a response with the low level API:
const personFields = ["emailAddresses", "names"];
const url = `https://people.googleapis.com/v1/people/me/connections?personFields=${personFields.join(
","
)}`;
const res = await oAuth2Client.request({ url });
console.log(chalk.gray(JSON.stringify(res.data.connections, null, 2)));
Everything works like a charm, but I would like to use the high-level API from the same library:
google.people API
As described in the API Explorer, I built the code below:
const personFields = ["emailAddresses", "names"];
people.people.connections.list(
{ resourceName: "people/me", personFields },
(res, err) => {
console.log("error: ", err);
console.log("res1: ", res);
}
);
No error, no res1, nothing.
⚠️ I forgot a process.exit(0) in the main thread, so the app was terminated before the callback was executed. This code sample works like a charm.
I'm not sure why you aren't getting any logging, but you should probably return the res in the callback; otherwise awaiting it will return undefined.
Also make sure to set personFields in the people.people.connections.list call.
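Putting those two points together, a corrected call might look like the sketch below (assuming the googleapis Node.js client's error-first callbacks and a client created from the oAuth2Client built earlier; names are illustrative):
const { google } = require("googleapis");
// Assumes oAuth2Client already has credentials set, as in the question
const people = google.people({ version: "v1", auth: oAuth2Client });
const personFields = ["emailAddresses", "names"];
people.people.connections.list(
  { resourceName: "people/me", personFields: personFields.join(",") },
  (err, res) => {
    // Error-first callback: err comes first, the response second
    if (err) return console.error("error: ", err);
    console.log("connections: ", res.data.connections);
  }
);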
I'm trying to use a backend Node.js server to access (and edit) the device configuration on IoT Core, referring to these API docs.
However, I keep getting a 401 error with the message "Request had invalid authentication credentials. Expected OAuth 2 access token, login cookie or other valid authentication credential. See https://developers.google.com/identity/sign-in/web/devconsole-project." and status "UNAUTHENTICATED".
I created a service account and a key from Google IAM, and gave it Cloud IoT Device Controller permissions, which can update device configurations but not create or delete devices. Subsequently, I changed it to Cloud IoT Admin and even Project Editor permissions, but still saw the same error message. Am I getting the keys all wrong, or is there something else I should be doing?
The code below is how I invoked the request:
function createJwt (projectId, privateKeyFile, algorithm) {
// Create a JWT to authenticate this device. The device will be disconnected
// after the token expires, and will have to reconnect with a new token. The
// audience field should always be set to the GCP project ID.
const token = {
'iat': parseInt(Date.now() / 1000),
'exp': parseInt(Date.now() / 1000) + 20 * 60, // 20 minutes
'aud': projectId
};
const privateKey = fs.readFileSync(privateKeyFile);
return jwt.sign(token, privateKey, { algorithm: algorithm });
}
app.get('/', function(req, res){
let authToken = createJwt('test-project', './keys/device-config.pem', 'RS256');
const options = {
url: 'https://cloudiot.googleapis.com/v1/projects/test-project/locations/us-central1/registries/dev-registry/devices/test-device',
headers: {
'authorization': 'Bearer ' + authToken,
'content-type': 'application/json',
'cache-control': 'no-cache'
},
json: true
}
request.get(options, function(error, response){
if(error) res.json(error);
else res.json(response);
})
});
For backend servers to interact with IoT-Core, the authentication method is not the same as for device MQTT or HTTP connections. Reference: https://cloud.google.com/iot/docs/samples/device-manager-samples#get_a_device
I was able to retrieve and update device configurations using the code below
function getClient (serviceAccountJson, cb) {
const serviceAccount = JSON.parse(fs.readFileSync(serviceAccountJson));
const jwtAccess = new google.auth.JWT();
jwtAccess.fromJSON(serviceAccount);
// Note that if you require additional scopes, they should be specified as a
// string, separated by spaces.
jwtAccess.scopes = 'https://www.googleapis.com/auth/cloud-platform';
// Set the default authentication to the above JWT access.
google.options({ auth: jwtAccess });
const DISCOVERY_API = 'https://cloudiot.googleapis.com/$discovery/rest';
const API_VERSION = 'v1';
const discoveryUrl = `${DISCOVERY_API}?version=${API_VERSION}`;
google.discoverAPI(discoveryUrl, {}, (err, client) => {
if (err) {
console.log('Error during API discovery', err);
return undefined;
}
cb(client);
});
}
function getDevice (client, deviceId, registryId, projectId, cloudRegion) {
const parentName = `projects/${process.env.GCP_PROJECT_ID}/locations/${cloudRegion}`;
const registryName = `${parentName}/registries/${registryId}`;
const request = {
name: `${registryName}/devices/${deviceId}`
};
const promise = new Promise(function(resolve, reject){
client.projects.locations.registries.devices.get(request, (err, data) => {
if (err) {
console.log('Could not find device:', deviceId);
console.log(err);
reject(err);
} else {
console.log(data.config.binaryData);
resolve(data);
}
});
});
return promise;
}
app.get('/', function(req, res){
const cb = function(client){
getDevice(client, 'test-device', 'dev-registry', process.env.GCP_PROJECT_ID, 'us-central1')
.then(function(response){
let decoded = Buffer.from(response.config.binaryData, 'base64').toString();
res.json(decoded);
})
.catch(function(error){
res.json(error);
})
}
getClient(serviceAccountJson, cb);
});
I think what you're looking to do is best accomplished using the client library for NodeJS.
First, retrieve an API client object as done in the sample. This will take in the service account credentials you used and will authenticate against Google API Core servers.
At the point in the referenced code where cb(client); is invoked, you'll have your client object and are ready to update your device. Add the imports and API constants from the sample and replace the code where you have a client object with the following code and you should be set.
Use some strings for your device identifiers:
const projectId = 'my-project';
const cloudRegion = 'us-central1';
const registryId = 'my-registry';
const deviceId = 'my-device';
const config = '{fan: 800}';
Next, form your device name string:
const deviceName = `projects/${projectId}/locations/${cloudRegion}/registries/${registryId}/devices/${deviceId}`;
const binaryData = Buffer.from(config).toString('base64');
Now you form your request object and execute:
const request = {
name: deviceName,
versionToUpdate: 0,
binaryData: binaryData
};
console.log(request);
client.projects.locations.registries.devices
.modifyCloudToDeviceConfig(
request,
(err, data) => {
if (err) {
console.log('Could not update config:', deviceId);
console.log('Message: ', err);
} else {
console.log('Success :', data);
}
});
Your configuration is updated. If your device is subscribed to the config topic on MQTT it will receive the latest configuration, otherwise, you can poll for the configuration with HTTP from your device.
Just to confirm, when you created the SSL key pair, and when you registered the device with the Cloud IoT Core registry, did you match the type of key created with the radio button you registered it with?
Also to confirm: did you put the Google root certificate on the device in the same directory as the private key (./keys/device-config.pem)? If not, you can fetch it with: wget https://pki.google.com/roots.pem.
What is the best way (most secure and easiest) to authenticate a user for a server side route?
Software/Versions
I'm using the latest Iron Router 1.* and Meteor 1.* and to begin, I'm just using accounts-password.
Reference code
I have a simple server side route that renders a pdf to the screen:
both/routes.js
Router.route('/pdf-server', function() {
var filePath = process.env.PWD + "/server/.files/users/test.pdf";
console.log(filePath);
var fs = Npm.require('fs');
var data = fs.readFileSync(filePath);
this.response.write(data);
this.response.end();
}, {where: 'server'});
As an example, I'd like to do something close to what this SO answer suggested:
On the server:
var Secrets = new Meteor.Collection("secrets");
Meteor.methods({
getSecretKey: function () {
// check if the user has privileges
if (!this.userId)
throw new Meteor.Error(403);
return Secrets.insert({_id: Random.id(), user: this.userId});
},
});
And then in client code:
testController.events({
'click button[name=get-pdf]': function () {
Meteor.call("getSecretKey", function (error, response) {
if (error) throw error;
if (response)
Router.go('/pdf-server');
});
}
});
But even if I somehow got this method working, I'd still be vulnerable to users just putting in a URL like '/pdf-server' unless the route itself somehow checked the Secrets collection right?
In the Route, I could get the request, and somehow get the header information?
Router.route('/pdf-server', function() {
var req = this.request;
var res = this.response;
}, {where: 'server'});
And from the client pass a token over the HTTP header, and then in the route check if the token is good from the Collection?
In addition to using URL tokens as in the other answer, you could also use cookies:
Add in some packages that allow you to set cookies and read them server side:
meteor add mrt:cookies thepumpinglemma:cookies
Then you could have something that syncs the cookies up with your login status
Client Side
Tracker.autorun(function() {
//Update the cookie whenever they log in or out
Cookie.set("meteor_user_id", Meteor.userId());
Cookie.set("meteor_token", localStorage.getItem("Meteor.loginToken"));
});
Server Side
On the server side you just need to check this cookie is valid (with iron router)
Router.route('/somepath/:fileid', function() {
//Check the values in the cookies
var cookies = new Cookies( this.request ),
userId = cookies.get("meteor_user_id") || "",
token = cookies.get("meteor_token") || "";
//Check a valid user with this token exists
var user = Meteor.users.findOne({
_id: userId,
'services.resume.loginTokens.hashedToken' : Accounts._hashLoginToken(token)
});
//If they're not logged in tell them
if(!user) return this.response.end("Not allowed");
//Theyre logged in!
this.response.end("You're logged in!");
}, {where:'server'});
I think I have a secure and easy solution for doing this from within IronRouter.route(). The request must be made with a valid user ID and auth token in the header. I call this function from within Router.route(), which then gives me access to this.user, or responds with a 401 if the authentication fails:
# Verify the request is being made by an actively logged-in user
# @context: IronRouter.Router.route()
authenticate = ->
  # Get the auth info from the headers
  userId = this.request.headers['x-user-id']
  loginToken = this.request.headers['x-auth-token']
  # Get the user from the database
  if userId and loginToken
    user = Meteor.users.findOne {'_id': userId, 'services.resume.loginTokens.token': loginToken}
  # Return an error if the login token does not match any belonging to the user
  if not user
    respond.call this, {success: false, message: "You must be logged in to do this."}, 401
  # Attach the user to the context so they can be accessed at this.user within the route
  this.user = user

# Respond to an HTTP request
# @context: IronRouter.Router.route()
respond = (body, statusCode=200, headers) ->
  this.response.statusCode = statusCode
  this.response.setHeader 'Content-Type', 'text/json'
  this.response.writeHead statusCode, headers
  this.response.write JSON.stringify(body)
  this.response.end()
And something like this from the client:
Meteor.startup ->
  HTTP.get "http://yoursite.com/pdf-server",
    headers:
      'X-Auth-Token': Accounts._storedLoginToken()
      'X-User-Id': Meteor.userId()
    (error, result) -> # This callback is triggered once the HTTP response is received
      console.log result
This code was heavily inspired by RestStop and RestStop2. It's part of a meteor package for writing REST APIs in Meteor 0.9.0+ (built on top of Iron Router). You can check out the complete source code here:
https://github.com/krose72205/meteor-restivus
Because server-side routes act as simple REST endpoints, they don't have access to user authentication data (e.g. they can't call Meteor.user()). Therefore you need to devise an alternative authentication scheme. The most straightforward way to accomplish this is with some form of key exchange as discussed here and here.
Example implementation:
server/app.js
// whenever the user logs in, update her apiKey
Accounts.onLogin(function(info) {
// generate a new apiKey
var apiKey = Random.id();
// add the apiKey to the user's document
Meteor.users.update(info.user._id, {$set: {apiKey: apiKey}});
});
// auto-publish the current user's apiKey
Meteor.publish(null, function() {
return Meteor.users.find(this.userId, {fields: {apiKey: 1}});
});
lib/routes.js
// example route using the apiKey
Router.route('/secret/:apiKey', {name: 'secret', where: 'server'})
.get(function() {
// fetch the user with this key
// note you may want to add an index on apiKey so this is fast
var user = Meteor.users.findOne({apiKey: this.params.apiKey});
if (user) {
// we have authenticated the user - do something useful here
this.response.statusCode = 200;
return this.response.end('ok');
} else {
// the key is invalid or not provided so return an error
this.response.statusCode = 403;
return this.response.end('not allowed');
}
});
client/app.html
<template name="myTemplate">
{{#with currentUser}}
<a href="{{pathFor 'secret' apiKey=apiKey}}">secret</a>
{{/with}}
</template>
Notes
Make /secret only accessible via HTTPS.
While it's very likely that the user requesting /secret is currently connected, there is no guarantee that she is. The user could have logged in, copied her key, closed the tab, and initiated the request sometime later.
This is a simple means of user authentication. I would explore more sophisticated mechanisms (see the links above) if the server-route reveals high-value data (SSNs, credit cards, etc.).
See this question for more details on sending static content from the server.
I truly believe using HTTP headers is the best solution to this problem, because they're simple and don't require messing about with cookies or developing a new authentication scheme.
I loved @kahmali's answer, so I rewrote it to work with WebApp and a simple XMLHttpRequest. This has been tested on Meteor 1.6.
Client
import { Meteor } from 'meteor/meteor';
import { Accounts } from 'meteor/accounts-base';
// Skipping ahead to the upload logic
const xhr = new XMLHttpRequest();
const form = new FormData();
// Add files
files.forEach((file) => {
form.append(file.name,
// So BusBoy sees as file instead of field, use Blob
new Blob([file.data], { type: 'text/plain' })); // w/e your mime type is
});
// XHR progress, load, error, and readystatechange event listeners here
// Open Connection
xhr.open('POST', '/path/to/upload', true);
// Meteor authentication details (must happen *after* xhr.open)
xhr.setRequestHeader('X-Auth-Token', Accounts._storedLoginToken());
xhr.setRequestHeader('X-User-Id', Meteor.userId());
// Send
xhr.send(form);
Server
import { Meteor } from 'meteor/meteor';
import { WebApp } from 'meteor/webapp';
import { Roles } from 'meteor/alanning:roles'; // optional
const BusBoy = require('connect-busboy');
const crypto = require('crypto'); // built-in Node library
WebApp.connectHandlers
.use(BusBoy())
.use('/path/to/upload', (req, res) => {
const user = req.headers['x-user-id'];
// We have to get a base64 digest of the sha256 hashed login token
// I'm not sure when Meteor changed to hashed tokens, but this is
// one of the major differences from @kahmali's answer
const hash = crypto.createHash('sha256');
hash.update(req.headers['x-auth-token']);
// Authentication (is user logged-in)
if (!Meteor.users.findOne({
_id: user,
'services.resume.loginTokens.hashedToken': hash.digest('base64'),
})) {
// User not logged in; 401 Unauthorized
res.writeHead(401);
res.end();
return;
}
// Authorization
if (!Roles.userIsInRole(user, 'whatever')) {
// User is not authorized; 403 Forbidden
res.writeHead(403);
res.end();
return;
}
if (req.busboy) {
// Handle file upload
res.writeHead(201); // eventually
res.end();
} else {
// Something went wrong
res.writeHead(500); // server error
res.end();
}
});
I hope this helps someone!
Since Meteor doesn't use session cookies, the client must explicitly include some sort of user identification when making an HTTP request to a server route.
The easiest way to do it is to pass the userId in the query string of the URL. Obviously, you also need to add a security token that proves the user really is who they claim to be. Obtaining this token can be done via a Meteor method.
Meteor by itself doesn't provide such a mechanism, so you need some custom implementation. I wrote a Meteor package called mhagmajer:server-route which was thoroughly tested. You can learn more about it here: https://blog.hagmajer.com/server-side-routing-with-authentication-in-meteor-6625ed832a94
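As a rough illustration of that approach (a sketch only, reusing the Secrets collection idea from the question and an Iron Router server route; names and paths are just examples):
// Server: issue a one-time token for the currently logged-in user
Secrets = new Meteor.Collection("secrets");
Meteor.methods({
  getSecretKey: function () {
    if (!this.userId) throw new Meteor.Error(403, "Not authorized");
    return Secrets.insert({ user: this.userId, createdAt: new Date() });
  },
});

// Server route: require a valid token in the query string before serving the file
Router.route('/pdf-server', function () {
  var secret = Secrets.findOne({ _id: this.params.query.token });
  if (!secret) {
    this.response.statusCode = 403;
    return this.response.end('not allowed');
  }
  // ...read and stream the PDF here, as in the original route...
  this.response.end('ok');
}, { where: 'server' });

// Client: fetch the token via the method, then navigate with it in the query string
Meteor.call("getSecretKey", function (error, token) {
  if (!error) Router.go('/pdf-server?token=' + token);
});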