How to set up Google Domains Dynamic DNS with Node.js & Express?

I have set up a Node.js website on my Raspberry Pi running Express.
I want to update my Google Domains DNS record, using Dynamic DNS, from Node.js/Express.
What I have attempted:
const https = require('https');

function domainUpdate() {
  console.log(`[SERVER] Posting to Google's Servers...`)
  let username = process.env.USERNAME || ``;
  let password = process.env.PASSWORD || ``;
  let hostname = process.env.HOSTNAME || ``;
  let url = `https://${username}:${password}@domains.google.com/nic/update?hostname=${hostname}`;
  https.request(url, (res) => {
    console.log(`status - ${res.statusCode}`);
    console.log(`output - ${res.complete}`)
  }).on("error", (e) => {
    if (e) throw e
  })
}
Using curl, it would look something like this:
curl "https://$USERNAME:$PASSWORD@domains.google.com/nic/update?hostname=$HOSTNAME&myip=$IP_ADDRESS"
There is no output except "[SERVER] Posting to Google's Servers...".

Give something like Axios a try for making the actual request. It handles a lot of the heavy lifting for you:
const axios = require('axios');

function domainUpdate() {
  console.log(`[SERVER] Posting to Google's Servers...`)
  let username = process.env.USERNAME || ``;
  let password = process.env.PASSWORD || ``;
  let hostname = process.env.HOSTNAME || ``;
  let url = `https://${username}:${password}@domains.google.com/nic/update?hostname=${hostname}`;
  axios.get(url)
    .then(res => {
      console.log(`status - ${res.status}`);
      console.log(`output - ${res.data}`);
    })
    .catch(err => {
      throw err;
    });
}
It looks like that endpoint returns a 200 as long as the request is formatted properly. The Google support article that you linked to lists the possible responses, which will be in the data property of your Axios response object.
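If you also want domainUpdate() to report whether the update actually succeeded, you could inspect that body. A rough sketch, assuming the response strings documented in that article (e.g. "good"/"nochg" on success, "badauth"/"nohost" on failure):

// Sketch only: response strings assumed from Google's Dynamic DNS docs
axios.get(url)
  .then(res => {
    const body = String(res.data).trim();
    if (body.startsWith('good') || body.startsWith('nochg')) {
      console.log(`[SERVER] Update accepted: ${body}`);
    } else {
      // e.g. "badauth", "nohost", "notfqdn" - see the support article for the full list
      console.error(`[SERVER] Update rejected: ${body}`);
    }
  })
  .catch(err => console.error(`[SERVER] Request failed: ${err.message}`));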

Related

How to request apikey from backend API using node js?

I was learning to build a weather app using Node (Express) + React. I successfully fetched weather data from the OpenWeather API.
However, I was using the OpenWeather API key directly in my React app, like this: const weatherURL = 'http://api.openweathermap.org/data/2.5/weather?q=london,uk&APPID=1234567qwerty';. Obviously this is not safe, as it exposes the API key to the client. I thought about storing the API key in a .env file, but according to [this answer][1], I should never store an API key in a .env file or rely on .gitignore. The right way is to have the front end make a request to my backend API, which makes the API call and sends the data back. I could not figure out how to do this. Can anyone help?
Following is my Node.js code:
const express = require('express');
const cors = require('cors');
const app = express();

const SELECT_ALL_QUERY = 'SELECT * FROM `mySchema`.`myTable`;';

app.use(cors());

app.get('/', (req, res) => {
  res.send('go to /myTable to see content')
});

const pool = require('./awsPool');
pool.getConnection((err, connection) => {
  if (err) {
    return console.log('ERROR! ', err);
  }
  if (!connection) {
    return console.log('No connection was found');
  }
  app.get('/myTable', (req, res) => {
    console.log(connection);
    connection.query(SELECT_ALL_QUERY, (err, results) => {
      if (err) {
        return res.send(err)
      } else {
        return res.json({
          data: results
        })
      }
    });
  });
});

let port = process.env.PORT || 4000;
app.listen(port, () => {
  console.log(`App running on port ${port} `);
});
[1]: https://stackoverflow.com/a/57103663/8720421
What the linked answer was suggesting is to create a route in your Node/Express backend API that will make the call to the weather API for you, instead of the front end. This way the request and your API key are not public-facing whenever your front end makes a call.
The method for doing this would essentially be the same as what you have done in React, making an HTTP request using a built-in or 3rd party library. This resource I just found has some information on how to do both.
The simplest pure http-request in node looks like this:
const http = require('http')

const url = 'http://api.openweathermap.org/data/'
http.request(url, callback).end()

function callback (weatherResponse) {
  let jsonString = ''
  weatherResponse.on('data', chunk => {
    jsonString += chunk
  })
  weatherResponse.on('end', () => {
    // Now you have the complete response and can do whatever you want with it
    // like return it to your user `res.send(jsonString)`
    console.log(jsonString)
  })
}
Many people find it bulky to have to handle chunks and the whole asynchronous thing, so there are many popular npm modules, like axios: https://www.npmjs.com/package/axios. (And here's a list of other contenders: https://github.com/request/request/issues/3143.)
Also, it is normal to store API-keys in environment variables on the backend. It makes things easy if you ever try to dockerize your app, or just scale up to using two backend servers instead of one.
I found a solution based on @ippi's answer; add the following part to the original code:
const request = require('request');
const url = 'http://api.openweathermap.org/data/2.5/weather?q=london,uk&APPID=1234567';

app.get('/weather', (req, res) => {
  request(url, (error, response, body) => {
    if (!error && response.statusCode == 200) {
      var info = JSON.parse(body)
      res.send(info);
    }
  })
})
The URL can be stored in a .env file and passed into the above code. The returned weather data can be viewed as JSON at http://localhost:4000/weather. In React, the weather data can be fetched via this localhost URL.
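If you want to keep the key out of the code entirely, a minimal sketch with the dotenv package (assuming a .env file containing a WEATHER_API_KEY entry) could look like this:

// Sketch: assumes `npm install dotenv` and a .env file with WEATHER_API_KEY=yourkey
require('dotenv').config();

const apiKey = process.env.WEATHER_API_KEY;
const url = `http://api.openweathermap.org/data/2.5/weather?q=london,uk&APPID=${apiKey}`;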
EDIT: request is deprecated, so here is a solution using axios
const axios = require('axios');

app.get('/weather', (req, res) => {
  axios.get(url)
    .then(response => {
      res.json(response.data);
    })
    .catch(error => {
      console.log(error);
    });
})
Use Passport middleware for Node.js/Express. It provides a passport-headerapikey strategy with which you can create and authorize API keys: http://www.passportjs.org/packages/passport-headerapikey/
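A rough sketch of what that strategy can look like, based on the passport-headerapikey README; the header/prefix options and the findByApiKey lookup below are placeholders you would adapt to your own key store:

// Sketch only: findByApiKey is a hypothetical lookup against your own key store
const passport = require('passport');
const HeaderAPIKeyStrategy = require('passport-headerapikey').HeaderAPIKeyStrategy;

passport.use(new HeaderAPIKeyStrategy(
  { header: 'Authorization', prefix: 'Api-Key ' },
  false,
  (apiKey, done) => {
    findByApiKey(apiKey)
      .then(user => done(null, user || false))
      .catch(err => done(err));
  }
));

// Protect a route so only requests with a valid "Authorization: Api-Key <key>" header get through
app.get('/weather', passport.authenticate('headerapikey', { session: false }), (req, res) => {
  // ...call the weather API here as shown above...
});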

Change issue closing pattern for gitlab user account

I want to close the issue whose title matches the file name I pushed (my source files are named with unique integers, e.g. 34521.cpp, and there are corresponding issues on GitLab, e.g. Problem #34521).
How can I do so?
The default pattern is not suitable, as I have 2000+ issues and I do not want to reference issues by their issue IDs each time. I want it to be automated, so I was checking this page:
Change the issue closing pattern.
It says I need to have access to the server where GitLab is installed. Does that mean I cannot change the issue closing pattern for a GitLab cloud user account hosted at https://gitlab.com?
You can't define a custom closing pattern on gitlab.com, only on your own self-hosted GitLab instance. What you can do is use webhooks to listen for push events on a remote server. You can then parse the commits yourself and decide whether to close issues, using the GitLab API to close them from your server (with a hard-coded access token).
This can be tested locally using an http tunnel like ngrok
The following Node.js script starts a server exposing a /webhook endpoint. This endpoint is called whenever a push occurs on your repo.
const express = require('express');
const bodyParser = require('body-parser');
const axios = require('axios');
const to = require('await-to-js').to;

const port = 3000;
const projectId = "4316159";
const accessToken = "YOUR_ACCESS_TOKEN";

const app = express();
app.use(bodyParser.json())

app.post('/webhook', async function (req, res) {
  console.log("received push event");
  let result, err, closeRes;
  for (var i = 0; i < req.body.commits.length; i++) {
    for (var j = 0; j < req.body.commits[i].added.length; j++) {
      const filenameWithoutExt = req.body.commits[i].added[j].split('.').slice(0, -1).join('.');
      // "#" must be URL-encoded as %23, otherwise everything after it is treated as a URL fragment
      [err, result] = await to(axios({
        url: `https://gitlab.com/api/v4/projects/${projectId}/issues?search=%23${filenameWithoutExt}`,
        method: 'GET',
        headers: {
          "PRIVATE-TOKEN": accessToken
        }
      }));
      if (err) {
        console.log(err);
      } else {
        if (result.data.length !== 0) {
          // close those issues
          for (const item in result.data) {
            console.log(`closing issue #${result.data[item].iid} with title ${result.data[item].title}`);
            [err, closeRes] = await to(axios({
              url: `https://gitlab.com/api/v4/projects/${projectId}/issues/${result.data[item].iid}?state_event=close`,
              method: 'PUT',
              headers: {
                "PRIVATE-TOKEN": accessToken
              }
            }));
            if (err) {
              console.log(err);
            } else {
              console.log(`closing status : ${closeRes.status}`);
            }
          }
        } else {
          console.log("no issues were found");
        }
      }
    }
  }
  res.sendStatus(200);
});

app.listen(port, () => console.log(`listening on port ${port}!`))
In the above you need to change the access token value and projectId. Also note that the above code checks only added files; you can modify it to include modified or deleted files to match your requirements, as sketched below.
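For instance (a sketch only; GitLab's push webhook payload exposes added, modified, and removed arrays on each commit), the inner loop could be replaced with:

// Sketch: gather added, modified and removed files from one commit of the push payload
const commit = req.body.commits[i];
const changedFiles = [...commit.added, ...commit.modified, ...commit.removed];
for (const file of changedFiles) {
  const filenameWithoutExt = file.split('.').slice(0, -1).join('.');
  // ...search for and close the matching issue as shown above...
}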
Launch ngrok on port 3000 (ngrok http 3000) and copy the given URL into the integrations section of your repo.
Now when you add any file, the webhook takes the filename without its extension, searches for all issues whose title contains #filename_without_extension, and closes them right away.

Node.js hangs on HTTP requests

I am currently developing an Electron application which listens on a TCP socket and sends requests to a REST server after receiving data.
For test purposes I use the following code:
const net = require('net');
const fetch = require('node-fetch');

const server = net.createServer(connection => {
  connection.setEncoding('utf8');
  connection.on('data', data => {
    handleData(data).then(() => connection.end());
  });
});

const handleData = data => {
  return Promise.resolve()
    .then(() => test(1))
    .then(() => test(2))
    .then(() => test(3))
    .then(() => test(4))
    .then(() => test(5));
};

const test = id => {
  return fetch(`https://server.domain.name/api/v1/software/${id}`)
    .then(response => response.json())
    .then(json => {
      console.log(json);
    });
};

const socketHost = '0.0.0.0';
const socketPort = 1337;
server.listen(socketPort, socketHost);
This code hangs on the first request. According to tcpdump, the request does not even leave my machine. The strange thing is that when I use an IP address in the URL instead of a domain name, the requests work fine. So I suppose this problem is somehow related to DNS lookup. Any ideas?
Edit: When running this code in pure Node.js the issue does not appear.
Edit: This issue only appears on Linux.

Node/Express - How to send get request to check a status code

I am trying to make a URL shortener. I need to take a given URL as a parameter and send a request to that URL just to get the status code. If status = 200, I know I've got a functioning URL, and I'll go ahead and add it to the DB and shorten it.
Problem is, when I make that request, the connection times out.
const express = require('express')
const mongoose = require('mongoose')
const cors = require('cors')
const nofavicon = require('express-no-favicons')
const Shortener = require('./shortener')

const app = express()
app.disable('x-powered-by')
app.use(cors())
app.use(nofavicon())
app.use(express.static(__dirname + '/static'))

mongoose.connect(
  process.env.MONGODB_URI || 'mongodb://heroku_x7hcc5zd:39c8i70697o7qrpjn4rd6kslch@ds123371.mlab.com:23371/heroku_x7hcc5zd'
)

app.get('/url/:urlParam(*)', (request, response) => {
  let urlParam = request.params.urlParam
  let urlRegEx = /[A-Za-z]+[://]+[A-Za-z0-9-_]+\.[A-Za-z0-9-_:%&;?#/.=]+/g
  if (urlRegEx.test(urlParam)) {
    let shortRandomNum = Math.floor(Math.random() * 10000).toString()
    // Shortener here refers to a mongoose Schema in external file
    let lmao = new Shortener({
      url: urlParam,
      urlmao: 'localhost:8080/lol/' + shortRandomNum,
    })
    // Request header from passed URL to verify legitimacy
    // Check statusCode and end request.
    app.head(urlParam, (req, res) => {
      let end = res.end
      // Override standard res.end function with custom function
      res.end = () => {
        if (res.statusCode == 200) {
          lmao.save((error) => {
            if (error) {
              response.send('Unable to write to collection')
            }
          })
          console.log('pass')
          response.json({lmao})
        }
      }
      res.end = end
      res.end()
    })
  } else {
    // If passed URL does not satisfy regEx, return error message.
    urlParam = 'unfunny url. http(s):// prefix required. check url and retry.'
    console.log('invalid url')
    response.json({
      url: urlParam,
    })
  }
})

app.listen(process.env.PORT || 8080, () => {
  console.log('live connection')
})
Most bafflingly, the code shown here worked on Friday. Tested it last night, no dice. Any insight would be greatly, greatly appreciated.
app.head(urlParam, [Function]) doesn't make a request to the URL; it defines a new route on your application so that it responds to HEAD requests on that URL.
To check if the URL is alive you need to use another package to make the request. One of my favourites is Request. To use it, simply replace app.head with request and add require('request') at the top of your file.
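A minimal sketch of that substitution, reusing urlParam, lmao, and response from your route handler (and assuming npm install request; note that the request package has since been deprecated, so a maintained client like axios would work the same way):

// Sketch: inside the /url/:urlParam(*) handler, instead of app.head(...)
const request = require('request');

request(urlParam, (error, res) => {
  if (!error && res.statusCode == 200) {
    lmao.save((saveErr) => {
      if (saveErr) {
        return response.send('Unable to write to collection');
      }
      response.json({ lmao });
    });
  } else {
    response.json({ url: 'could not reach the given url' });
  }
});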

How to capture http messages from Request Node library with Fiddler

Regular client-initiated requests to the Node server are captured fine in Fiddler. However, requests sent from Node to a web service are not captured. It did not help to pass proxy config (127.0.0.1:8888) to the request method. How can I route the request messages through Fiddler?
var http = require('http');
var request = require('request');

request.get(webserviceURL, {
    "auth": { "user": "user", "pass": "pass", "sendImmediately": true },
    "proxy": { "host": "127.0.0.1", "port": 8888 }
  },
  function (error, response) {
    console.log("response received");
  });
Request repo: https://github.com/mikeal/request
I just tried to do this myself (using Fiddler and the request library from npm). Here's how I got it working:
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'; // Ignore 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' authorization error

// Issue the request
request(
  {
    method: "GET",
    uri: "https://secure.somewebsite.com/",
    proxy: "http://127.0.0.1:8888" // Note the fully-qualified path to Fiddler proxy. No "https" is required, even for https connections to outside.
  },
  function (err, response, body) {
    console.log("done");
  });
This is with Fiddler2 using the default port and proxy options (and no proxy authentication).
Fiddler works by setting your "Internet Options" (from start menu) "Connections" > "LAN Settings" > "Proxy Server" to its port, thus making all HTTP traffic (clients which obey this setting) go through it.
You should point your Node.js client lib to use a proxy; the settings are shown in that options dialog after you start Fiddler.
The proxy option should be a full url, like this:
proxy : "http://127.0.0.1:8888"
To do this on an ad-hoc basis, without changing your code, you can use environment variables.
Request respects:
HTTP_PROXY
HTTPS_PROXY
NO_PROXY
So, to proxy just set these in your console before running your process.
For example, to setup http and https proxy use:
set HTTP_PROXY="http://127.0.0.1:8888"
set HTTPS_PROXY="http://127.0.0.1:8888"
set NODE_TLS_REJECT_UNAUTHORIZED=0
The last line avoids SSL issues when going through the Fiddler proxy.
I've been wanting the same thing: an equivalent of the Network tab in Chrome DevTools, only for Node.js. Unfortunately, it doesn't appear that one exists. I don't have Fiddler on macOS, so this is how I went about stubbing the require('http') methods to log and pass through. Leaving this here in case I need it again or someone else finds it helpful. You can turn it on by attaching a debugger and calling require('filename')() on the file containing this script (see the usage note after the code).
module.exports = () => {
  const http = require('http');
  http._request = http.request;
  global.DO_LOG_AJAX = true;

  const log = str => {
    if (global.DO_LOG_AJAX) {
      console.debug(str);
    }
  };

  const flushLog = (requestLines, responseLines) => {
    if (global.DO_LOG_AJAX) {
      log([
        '----------------Begin Request-----------------------------------',
        ...requestLines,
        '----------------End Request / Begin Response--------------------',
        ...responseLines,
        '----------------End Response------------------------------------',
      ].join('\n'));
    }
  };

  let write;
  let end;

  http.request = (...requestParams) => {
    const req = http._request(...requestParams);
    const { method, path, headers, host, port } = requestParams[0];
    const requestLogLines = [];
    requestLogLines.push(`${method} ${path}`);
    requestLogLines.push(`Host: ${host}:${port}`);
    for (const header of Object.keys(headers)) {
      requestLogLines.push(`${header}: ${headers[header]}`);
    }
    write = write || req.write;
    end = end || req.end;
    req.on('error', err => {
      log({ err });
    });
    req._write = write;
    req._end = end;
    const requestBody = [];
    req.write = (...writeParams) => {
      requestBody.push(writeParams[0].toString());
      return req._write(...writeParams);
    };
    req.end = (...endParams) => {
      if (endParams[0]) {
        requestBody.push(endParams[0].toString());
      }
      requestLogLines.push('');
      requestLogLines.push(requestBody.join(''));
      return req._end(...endParams);
    };
    const responseLogLines = [];
    req.once('response', response => {
      const responseBody = [];
      responseLogLines.push(`${response.statusCode} ${response.statusMessage}`);
      for (const header of Object.keys(response.headers)) {
        responseLogLines.push(`${header}: ${response.headers[header]}`);
      }
      const onData = chunk => {
        responseBody.push(chunk.toString());
      };
      const onClose = err => {
        responseLogLines.push('');
        responseLogLines.push(responseBody.join(''));
        responseLogLines.push('');
        responseLogLines.push(`--- ERROR --- ${err.toString()}`);
        flushLog(requestLogLines, responseLogLines);
        // remove the listener from the response stream, where it was attached
        response.removeListener('data', onData);
      };
      const onEnd = () => {
        responseLogLines.push('');
        responseLogLines.push(responseBody.join(''));
        flushLog(requestLogLines, responseLogLines);
        response.removeListener('data', onData);
      };
      response.on('data', onData);
      response.once('close', onClose);
      response.once('end', onEnd);
    });
    return req;
  };
};
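As a usage sketch (the ./http-logger path and the target host are placeholders; the options object should include host, port, path, method and headers, since the patched http.request reads all of them):

// Sketch: enable the logger once at startup (or from a debugger console), then use http as normal
require('./http-logger')(); // hypothetical path to the script above

const http = require('http');
const req = http.request(
  { host: 'example.com', port: 80, path: '/', method: 'GET', headers: {} },
  res => {
    res.resume(); // the logger prints the request and response once the stream ends
  }
);
req.end();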
