proxy-authentication header missing with https - node.js

I want to create a MITM proxy that can only be accessed by providing the correct credentials:
(async () => {
  const mockttp = require('mockttp');

  // Generate a self-signed CA certificate and start a local MITM proxy
  // that serves HTTPS with it.
  const https = await mockttp.generateCACertificate();
  const server = mockttp.getLocal({ https });

  // Log every intercepted request and answer it with a JSON body that
  // carries an incrementing counter.
  let counter = 0;
  server.forAnyRequest().thenCallback((request) => {
    console.log(JSON.stringify(request));
    const reply = {
      status: 200,
      json: { counterValue: counter++ },
    };
    return reply;
  });

  await server.start(8080);

  // Report the listening port and the CA cert's SPKI fingerprint so
  // clients can trust/pin the certificate.
  const caFingerprint = mockttp.generateSPKIFingerprint(https.cert);
  console.log(`Server running on port ${server.port}`);
  console.log(`CA cert fingerprint ${caFingerprint}`);
})(); // (Run in an async wrapper so we can use top-level await everywhere)
With http it works flawlessly, the proxy-authorization header is present:
curl -k -v --proxy "user:pass@127.0.0.1:8080" http://www.google.com
{
"id":"8978f1a3-8a4f-4395-b0dc-0cf8929e760a",
"matchedRuleId":"5a1bc167-7e34-4b0d-9f51-f8e49015b349",
"protocol":"http",
"httpVersion":"1.1",
"method":"GET",
"url":"http://www.google.com/",
"path":"/",
"remoteIpAddress":"::ffff:127.0.0.1",
"remotePort":32932,
"headers":{
"host":"www.google.com",
"proxy-authorization":"Basic dXNlcjpwYXNz",
"user-agent":"curl/7.83.1",
"accept":"*/*",
"proxy-connection":"Keep-Alive"
},
"rawHeaders":[
[
"Host",
"www.google.com"
],
[
"Proxy-Authorization",
"Basic dXNlcjpwYXNz"
],
[
"User-Agent",
"curl/7.83.1"
],
[
"Accept",
"*/*"
],
[
"Proxy-Connection",
"Keep-Alive"
]
],
"tags":[
],
"timingEvents":{
"startTime":1663860475270,
"startTimestamp":7655.8840999901295,
"bodyReceivedTimestamp":7656.588100001216
},
"body":{
"buffer":{
"type":"Buffer",
"data":[
]
}
}
}
Now the problem is that if it runs through https, the proxy-authorization disappears:
curl -k -v --proxy "user:pass@127.0.0.1:8080" https://www.google.com
{
"id":"dd9f61c9-8ecb-4f94-87aa-095fd2f40da6",
"matchedRuleId":"5a1bc167-7e34-4b0d-9f51-f8e49015b349",
"protocol":"https",
"httpVersion":"1.1",
"method":"GET",
"url":"https://www.google.com/",
"path":"/",
"remoteIpAddress":"::ffff:127.0.0.1",
"remotePort":34557,
"headers":{
"host":"www.google.com",
"user-agent":"curl/7.83.1",
"accept":"*/*"
},
"rawHeaders":[
[
"Host",
"www.google.com"
],
[
"User-Agent",
"curl/7.83.1"
],
[
"Accept",
"*/*"
]
],
"tags":[
],
"timingEvents":{
"startTime":1663860737403,
"startTimestamp":269786.7910999954,
"bodyReceivedTimestamp":269787.29159998894
},
"body":{
"buffer":{
"type":"Buffer",
"data":[
]
}
}
}
Is there anything I'm unaware of that causes this behaviour?

Related

declarativeNetRequest.updateDynamicRules are not updating, how do I properly remove and add rules?

Update: I noticed that the token is being passed the first time I run it, but no new tokens are being appended to my rules.
I have this Chrome extension on Gmail where the user logs in and it returns an access token; the token is passed to our API through an HTTP request. It works fine the first time around, passing the access token, but if I don't refresh Gmail, after 1 hour the access token expires and I get 401 errors in my application. I have a function interceptURL that will match the URL and give a refreshed token before the HTTP request is made (or so I thought).
Maybe after 1h the access token is expired so calling refresh token wont generate a new token?
background script
// Fires before each outgoing request; when the request targets one of our
// API hosts, ask the background page for a refreshed token (falling back
// to a full token fetch when no refresh is possible).
function interceptURL(requestDetails: chrome.webRequest.WebRequestBodyDetails) {
  console.log('intercepted: ' + requestDetails.url);
  const { url } = requestDetails;
  if (!url.includes(liveApiUrl) && !url.includes(testApiUrl)) {
    return;
  }
  chrome.runtime.sendMessage({ "message": "refresh_token" }, (token: string) => {
    // No token came back from the refresh path — request a fresh one.
    if (token == undefined) {
      chrome.runtime.sendMessage({ "message": "get_token" });
    }
  });
}

chrome.webRequest.onBeforeRequest.addListener(
  interceptURL,
  { urls: [liveApiUrl, testApiUrl] }
)
Here are my Rules
function GetInterceptRules(token: string) {
const allResourceTypes =
Object.values(chrome.declarativeNetRequest.ResourceType);
return [
{
id: 1,
priority: 1,
action: {
type: chrome.declarativeNetRequest.RuleActionType.MODIFY_HEADERS,
requestHeaders: [
{
operation: chrome.declarativeNetRequest.HeaderOperation.SET,
header: 'Authorization',
value: 'Bearer ' + token,
},
]
},
condition: {
urlFilter: liveApiUrl,
initiatorDomains: ["mail.google.com"],
resourceTypes: allResourceTypes,
}
},
{
id: 2,
priority: 1,
action: {
type: chrome.declarativeNetRequest.RuleActionType.MODIFY_HEADERS,
requestHeaders: [
{
operation: chrome.declarativeNetRequest.HeaderOperation.SET,
header: 'Authorization',
value: 'Bearer ' + token,
},
]
},
condition: {
urlFilter: testApiUrl,
initiatorDomains: ["mail.google.com"],
resourceTypes: allResourceTypes,
}
}
];
My thought was:
1 - I give it a refresh token before every HTTP request, so when I update the dynamic rules, they would pass the new token. (That's what I currently have.)
2 - I could check when the access token was created and just make sure the code to get token run before the 1 hour ends. (Maybe not the best approach?)
To get the access token
// Run Azure's OAuth flow (silently when isInteractive is false) and pull
// the access token out of the redirect URL that comes back.
chrome.identity.launchWebAuthFlow(
{
url: azureTokenAuthUrl,
interactive: isInteractive
},
(redirectURL: string) => {
let token: string = '';
if (redirectURL != null) {
// NOTE(review): URLSearchParams is handed the WHOLE redirect URL here.
// Implicit-grant tokens usually arrive in the "#" fragment, so
// params.get("access_token") may come back null — TODO confirm the
// shape of redirectURL.
let params = new URLSearchParams(redirectURL);
token = params.get("access_token");
}
console.log("acces_token", token);
console.log(redirectURL)
// Push the (possibly empty) token into the dynamic rules, then hand it
// back to the caller.
UpdateIntercept(token)
callback(token)
}
Manifest V3
"permissions": [
"webRequest",
"declarativeNetRequest",
"declarativeNetRequestWithHostAccess",
"identity",
"identity.email"
],
"background": {
"service_worker": "/static/js/Background.js"
},
"content_scripts": [
{
"matches": [ "<all_urls>" ],
"css": [ "/css/bootstrap-iso.css" ],
"js": [ "react.production.min.js", "react-bootstrap.min.js", "react-dom.production.min.js" ]
},
{
"matches": [ "*://mail.google.com/*" ],
"css": [ "/css/AuthButton.css" ],
"js": [ "/static/js/AuthButton.js" ]
},
{
"matches": [ "*://mail.google.com/*" ],
"js": [ "/static/js/PushNotification.js" ]
}
],
I've been searching around but can't seem to find a solution for my problem.
I tried using JWT to decode so I know it's expired.
Added the launchAuthFlow with interactivity false to my listener where I check if an email has been opened, if so, trigger launchAuthFlow appending the token to the HTTP Request

Policy Condition failed: ["starts-with", "$Content-Type", ""] on AWS S3

I am using the aws-sdk for node.js and trying to upload an image with Content-Type: 'image/png'.
I am creating a presignedpost:
// Build a presigned POST policy for a direct browser upload to S3.
const post = s3.createPresignedPost({
Bucket: process.env.AWS_S3_BUCKET_NAME,
Fields: {
key: 'hubspot_customer_portal/' + fileName
},
Expires: 60, // seconds
Conditions: [
// NOTE(review): the upload form below never appends a Content-Type
// field, so this "starts-with" condition has nothing to match against —
// that is the "Policy Condition failed" error. Per the fix stated later
// in this post, move { 'Content-Type': 'image/png' } into Fields and
// drop these Content-Type conditions.
["starts-with", "$Content-Type", ""],
{ 'Content-Type': 'image/png' },
// Cap uploads at 1 MiB.
['content-length-range', 0, 1048576],
],
})
return post
But I get Policy Condition failed: ["starts-with", "$Content-Type", ""] in the response.
My CORS-policy on my bucket is
[
{
"AllowedHeaders": [
"*"
],
"AllowedMethods": [
"PUT",
"POST",
"DELETE"
],
"AllowedOrigins": [
"*"
],
"ExposeHeaders": []
}]
I use Axios to make a post-request with the presigned data
// Client side: fetch the presigned POST data, then submit the presigned
// fields plus the file as multipart/form-data straight to S3.
const filename = encodeURIComponent(file.name)
const res = await APIService.upload(filename, folder) // Get presigned data
const { url, fields } = res.data
const formData = new FormData()
// Spreading fields before `file` keeps the file as the last form entry.
Object.entries({ ...fields, file }).forEach(([key, value]: any) => {
formData.append(key, value)
})
const upload = await axios.post(url, formData)
I've also tried making the post request like this
await axios.post(url, formData,{headers:{'Content-Type':'image/png'}})
I found the solution.
I don't need the conditions at all and instead of putting { 'Content-Type': 'image/png' }, in the Conditions, I put it in the Fields instead

Logstash - parse array of JSON

I'm trying to parse SendGrid Webhook events using Logstash. The issue is that Logstash's output is not an array of JSON, but only JSON. Square brackets are missing.
The reason I'm doing this is GeoIP location and UserAgent parsing for analytics.
I am posting to 127.0.0.1:3000, then I want to forward the output to 127.0.0.1:8080.
8080 is just a basic Express server which prints requests/responses and it sends final data to ElasticSearch.
This is the input:
[
{
"email": "email#domain.com",
"event": "click",
"ip": "8.8.8.8",
"sg_event_id": "WS1wXXhERnefBsqEt5FSFA",
"sg_message_id": "mk4Msf8nQvycsZIAHQPOrw.filter0321p1iad2-30191-5E686C57-5D.0",
"timestamp": 1596484698,
"url": "http://10.0.0.6/ServiceCenter/view",
"url_offset": { "index": 1, "type": "html" },
"useragent": "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
}
]
This is the output:
{
sg_event_id: 'WS1wXXhERnefBsqEt5FSFA',
event: 'click',
email: 'email#domain.com',
sg_message_id: 'mk4Msf8nQvycsZIAHQPOrw.filter0321p1iad2-30191-5E686C57-5D.0',
timestamp: 1596484698,
useragent: 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
ip: '8.8.8.8',
url: 'http://10.0.0.6/ServiceCenter/view',
url_offset: { index: 1, type: 'html' }
}
This is my config:
input {
  http {
    host => "127.0.0.1"
    port => "8080"
  }
}
filter {
  mutate {
    # "@version" and "@timestamp" are Logstash's built-in metadata fields;
    # the "#" in the original post is an "@" mangled by the site's formatter.
    remove_field => [ "@version", "@timestamp", "headers", "host" ]
  }
}
output {
  # Forward each event to the local collector, and echo it for debugging.
  http {
    http_method => "post"
    url => "http://127.0.0.1:3000"
  }
  stdout {
    codec => rubydebug
  }
}
So finally, I've found a workaround to do it using the json_encode filter.
Used ruby code to store keys and values to @DATA[oldJSON]
Used json_encode plugin on @DATA[oldJSON] and save the results to @DATA[newJSON]
Important: Set http output format => message and content_type => "application/json; charset=UTF-8". Default is text/plain and we don't want this.
Set message value to '[ %{[@DATA][newJSON]} ]'
Config:
input {
  http {
    host => "127.0.0.1"
    port => "8080"
  }
}
filter {
  mutate {
    # "@version" and "@timestamp" are Logstash's built-in metadata fields;
    # the "#" in the original post is an "@" mangled by the site's formatter.
    remove_field => [
      "@version",
      "@timestamp",
      "headers",
      "host"
    ]
  }
  ruby {
    # Copy every top-level field under [@DATA][oldJSON] so the whole event
    # can be re-encoded as a single JSON object below.
    # (`#{k}` is genuine Ruby string interpolation — its "#" stays.)
    code => '
      event.to_hash.each { |k,v|
        event.set("[@DATA][oldJSON][#{k}]", v)
      }
    '
  }
  json_encode {
    source => "[@DATA][oldJSON]"
    target => "[@DATA][newJSON]"
  }
}
output {
  http {
    http_method => "post"
    url => "http://127.0.0.1:3000"
    # Send the message string verbatim as JSON (default is text/plain).
    format => message
    content_type => "application/json; charset=UTF-8"
    # Wrap the single encoded object in square brackets to restore the array.
    message => '[ %{[@DATA][newJSON]} ]'
  }
}
Output:
[
{
ip: '8.8.8.8',
sg_message_id: 'mk4Msf8nQvycsZIAHQPOrw.filter0321p1iad2-30191-5E686C57-5D.0',
url_offset: { type: 'html', index: 1 },
sg_event_id: 'WS1wXXhERnefBsqEt5FSFA',
email: 'email#domain.com',
event: 'click',
url: 'http://10.0.0.6/ServiceCenter/view',
timestamp: 1596484698,
useragent: 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko'
}
]
Maybe somebody will find this useful.

"data" field not populated in axios response from express server

I am trying to access data from a Node.js server, using Express on the server and Axios on the client.
This is the endpoint I am trying to reach: http://gentle-bastion-49098.herokuapp.com/api/filters
As you can see it actually returns data when you navigate to it. But when I try to access it using the following code:
const BASE_URL = 'http://gentle-bastion-49098.herokuapp.com/api'

// Fetch the /filters endpoint; resolves with the full axios response object.
function getFilterData () {
  return axios.get(`${BASE_URL}/filters`)
}

getFilterData()
  .then((response) => {
    console.log('filter', response)
  })
  .catch((err) => {
    alert('Could not get filters ' + err.message.toString())
  })
I get this response with the "data" field being unpopulated where I'm expecting it to contain the JSON you see in the URL.
{
"data": "",
"status": 200,
"statusText": "OK",
"headers": {},
"config": {
"url": "http://gentle-bastion-49098.herokuapp.com/api/filters",
"method": "get",
"headers": {
"Accept": "application/json, text/plain, */*"
},
"transformRequest": [null],
"transformResponse": [null],
"timeout": 0,
"xsrfCookieName": "XSRF-TOKEN",
"xsrfHeaderName": "X-XSRF-TOKEN",
"maxContentLength": -1
},
"request": {}
}
Here is the back end code
// --- Server setup ------------------------------------------------------
const express = require('express');
const app = express();
const async = require('async');
const request = require('request');
const http = require('http');
const EventSource = require('eventsource');
const port = process.env.PORT || 8080;
const bodyParser = require('body-parser');
const jsonParser = bodyParser.json()
// GET /api/filters — return the filter metadata the client UI consumes.
app.get('/api/filters', function(req, res) {
// NOTE(review): `uniqueFilters` is not defined anywhere in this snippet;
// if it is undefined at request time this handler throws before res.json()
// runs — TODO confirm where it is populated.
let filtersResponse = {
"ID": "CONV_DATA#IVA",
"ApplicationName": "InterationsView",
"Type": "FILT_DETAIL",
"filters": [{
"Name": "ChannelType",
"Values": uniqueFilters.ChannelType,
},
{
"Name": "sessionType",
"Values": uniqueFilters.sessionType,
},
{
"Name": "Direction",
"Values": uniqueFilters.Direction,
},
{
"Name": "Status",
"Values": uniqueFilters.Status,
},
{
"Name": "statusReason",
"Values": uniqueFilters.statusReason,
},
],
"minDuration": uniqueFilters.minDuration,
"maxDuration": uniqueFilters.maxDuration,
"minData": "2019-08-29T22:28:47.029UTC",
"maxDate": "2019-08-29T22:28:49.578UTC"
};
// Respond with filters
res.json(filtersResponse);
});
Any ideas as to why the data field is unpopulated even though when accessed through browser or postman it returns the desired data? Is it a problem with the back end or the way the request is being made? Thanks.
I have also enabled cross-orgin resource sharing on my browser. Not doing so results in an error
I am not clear whether you are failing to get the axios response or the response from your Node server. If the problem is getting the axios response, here is the code.
I have used request npm for making a get request.
const request = require('request');
// `const` — the original assigned `apiUrl` as an implicit global.
const apiUrl = "http://gentle-bastion-49098.herokuapp.com/api/filters"
request.get(
  {
    url: apiUrl,
    json: true // parse the response body as JSON
  },
  function (error, response, body) {
    if (error) {
      // Return early: the original fell through and also logged
      // "Response Data : undefined" after an error.
      return console.log("Error Occurred :", error);
    }
    console.log("Response Data :", body)
  }
);
The above code will give you response as :
{
"ID":"CONV_DATA#IVA",
"ApplicationName":"InterationsView",
"Type":"FILT_DETAIL",
"filters":[
{
"Name":"ChannelType",
"Values":[
"Phone",
"Web-Chat",
"Google-Assistant"
]
},
{
"Name":"sessionType",
"Values":[
"nlu-voice",
"nlu-text"
]
},
{
"Name":"Direction",
"Values":[
"In"
]
},
{
"Name":"Status",
"Values":[
"Complete",
"Started"
]
},
{
"Name":"statusReason",
"Values":[
"END"
]
}
],
"minDuration":9.7,
"maxDuration":154.2,
"minData":"2019-08-29T22:28:47.029UTC",
"maxDate":"2019-08-29T22:28:49.578UTC"
}
which is same as what you get in browser when you visit the link http://gentle-bastion-49098.herokuapp.com/api/filters
If you are using axios the code will be :
const axios = require('axios');
// `const` — the original assigned `apiUrl` as an implicit global.
const apiUrl = "http://gentle-bastion-49098.herokuapp.com/api/filters"
axios.get(apiUrl)
  .then(function (response) {
    // axios puts the parsed body on `.data`.
    console.log("Response Data :", response.data);
  })
  .catch(function (error) {
    console.log("Error Occurred :", error);
  })
and it will give same response as above.
Even your written code is giving response :
Try with these changes:
getFilterData()
  .then((response) => {
    // Log only the payload, not the whole axios response envelope.
    console.log('filter', response.data)
  })
  .catch((err) => {
    alert('Could not get filters ' + err.message.toString())
  })
In your server code, send the response back to client using res.send() as shown below:
// GET /api/filters — same handler as above but replying via res.send().
// (For an object argument, res.send() delegates to res.json(), so the
// wire format is the same.)
app.get('/api/filters', function(req, res) {
// NOTE(review): `uniqueFilters` is still undefined in this snippet —
// TODO confirm it is populated before requests arrive.
let filtersResponse = {
"ID": "CONV_DATA#IVA",
"ApplicationName": "InterationsView",
"Type": "FILT_DETAIL",
"filters": [{
"Name": "ChannelType",
"Values": uniqueFilters.ChannelType,
},
{
"Name": "sessionType",
"Values": uniqueFilters.sessionType,
},
{
"Name": "Direction",
"Values": uniqueFilters.Direction,
},
{
"Name": "Status",
"Values": uniqueFilters.Status,
},
{
"Name": "statusReason",
"Values": uniqueFilters.statusReason,
},
],
"minDuration": uniqueFilters.minDuration,
"maxDuration": uniqueFilters.maxDuration,
"minData": "2019-08-29T22:28:47.029UTC",
"maxDate": "2019-08-29T22:28:49.578UTC"
};
// Respond with filters
res.send(
filtersResponse
)
});

Atlassian Connect-Express: JIRA REST API authentication within the JIRA plugin

I am using the atlassian-connect-express toolkit for creating Atlassian Connect based add-ons with Node.js.
It provides Automatic JWT authentication of inbound requests as well as JWT signing for outbound requests back to the host.
The add-on is authenticated when i install it in the JIRA dashboard and return the following pay-load:
{ key: 'my-add-on',
clientKey: '*****',
publicKey: '********'
sharedSecret: '*****'
serverVersion: '100082',
pluginsVersion: '1.3.491',
baseUrl: 'https://myaccount.atlassian.net',
productType: 'jira',
description: 'Atlassian JIRA at https://myaccount.atlassian.net ',
eventType: 'installed' }
But I am not able to authenticate the JIRA REST API with the JWT token generated by the framework. It throws the below error message.
404 '{"errorMessages":["Issue does not exist or you do not have permission to see it."],"errors":{}}'
below is the code when i send a GET request:
// GET /getissue — attempts to fetch issue ABC-1 from the Jira REST API.
app.get('/getissue', addon.authenticate(), function(req, res){
var request = require('request');
// NOTE(review): this is a plain, unsigned request to the absolute host
// URL, so no JWT is attached — Jira answers 404 ("Issue does not exist or
// you do not have permission to see it"). The working approach is
// addon.httpClient(req) with a relative URL, as shown later in this post.
request({
url: 'https://myaccount.atlassian.net/rest/api/2/issue/ABC-1',
method: 'GET',
}, function(error, response, body){
if(error){
console.log("error!");
}else{
console.log(response.statusCode, body);
}
});
// Rendered immediately; the Jira response above is only logged.
res.render('getissue');
});
Below is the code for my app descriptor file:
{
"key": "my-add-on",
"name": "Ping Pong",
"description": "My very first add-on",
"vendor": {
"name": "Ping Pong",
"url": "https://www.example.com"
},
"baseUrl": "{{localBaseUrl}}",
"links": {
"self": "{{localBaseUrl}}/atlassian-connect.json",
"homepage": "{{localBaseUrl}}/atlassian-connect.json"
},
"authentication": {
"type": "jwt"
},
"lifecycle": {
"installed": "/installed"
},
"scopes": [
"READ",
"WRITE"
],
"modules": {
"generalPages": [
{
"key": "hello-world-page-jira",
"location": "system.top.navigation.bar",
"name": {
"value": "Hello World"
},
"url": "/hello-world",
"conditions": [{
"condition": "user_is_logged_in"
}]
},
{
"key": "getissue-jira",
"location": "system.top.navigation.bar",
"name": {
"value": "Get Issue"
},
"url": "/getissue",
"conditions": [{
"condition": "user_is_logged_in"
}]
}
]
}
}
I am pretty sure this is not the correct way I am doing it; perhaps I should use OAuth instead. But I want to make the JWT method for authentication work here.
Got it working by checking in here Atlassian Connect for Node.js Express Docs
Within JIRA ADD-On Signed HTTP Requests works like below. GET and POST both.
GET:
// GET /getissue — fetch issue ABC-1 through the add-on's signed HTTP
// client. The relative URL is resolved against the host's baseUrl and the
// request is JWT-signed automatically.
//
// Fixes vs. the original: `Response = JSON.parse(body)` created an unused
// implicit global, and `res.send(response)` referenced an undefined
// variable AND ran before the Jira response had arrived. The reply is now
// sent from inside the callback.
app.get('/getissue', addon.authenticate(), function(req, res){
  var httpClient = addon.httpClient(req);
  httpClient.get('rest/api/2/issue/ABC-1',
    function(err, resp, body) {
      if(err){
        console.log(err);
        return res.status(500).send(err);
      }
      console.log('Sucessful')
      res.send(JSON.parse(body));
    });
});
POST:
// Signed POST through the add-on's HTTP client: create a "Story" issue in
// project MYW via the Jira REST API.
var httpClient = addon.httpClient(req);
var postdata = {
  "fields": {
    "project":
    {
      "key": "MYW"
    },
    "summary": "My Story Name",
    "description":"My Story Description",
    "issuetype": {
      "name": "Story"
    }
  }
}
httpClient.post({
  url: '/rest/api/2/issue/' ,
  headers: {
    // Opt out of Atlassian's XSRF token check for this request.
    'X-Atlassian-Token': 'nocheck'
  },
  json: postdata
},function (err, httpResponse, body) {
  if (err) {
    return console.error('Error', err);
  }
  // BUG FIX: the original logged ('Response',+httpResponse) — the stray
  // unary "+" coerced the response object to NaN. Log the object itself.
  console.log('Response', httpResponse)
});
You should be using global variable 'AP' that's initialized by JIRA along with your add-on execution. You may explore it with Chrome/Firefox Debug.
Have you tried calling ?
AP.request(..,...);
instead of "var request = require('request');"
You may set at the top of the script follwing to pass JS hinters and IDE validations:
/* global AP */
And when using AP the URL should look like:
url: /rest/api/2/issue/ABC-1
instead of:
url: https://myaccount.atlassian.net/rest/api/2/issue/ABC-1
My assumption is that ABC-1 issue and user credentials are verified and the user is able to access ABC-1 through JIRA UI.
Here is doc for ref.: https://developer.atlassian.com/cloud/jira/software/jsapi/request/

Resources