I'm trying to use access tokens from @azure/identity to connect to Azure SQL using mssql (which uses tedious behind the scenes). The access tokens don't seem to work as-is (this is quite similar to Python; more on that later).
I have the following code:
const identity = require("@azure/identity");
const sql = require("mssql");

function getConfig(accessToken) {
    const config = {
        authentication: {
            type: "azure-active-directory-access-token",
            options: {
                token: accessToken
            }
        },
        server: "dbserver.database.windows.net",
        options: {
            encrypt: true,
            database: "dbname"
        }
    };
    return config;
}

// inside an async function
const cred = new identity.DefaultAzureCredential();
const token = await cred.getToken("https://database.windows.net/.default");
const conf = getConfig(token.token);
let pool = await sql.connect(conf);
This always fails with "Login failed for user ''".
I have the following Python code, which does exactly the same thing:
import struct

import pyodbc
from azure import identity


def get_token():
    creds = identity.DefaultAzureCredential()
    token = creds.get_token("https://database.windows.net/.default")
    tokenb = bytes(token.token, "UTF-8")
    exptoken = b''
    for i in tokenb:
        exptoken += bytes({i})
        exptoken += bytes(1)
    tokenstruct = struct.pack("=i", len(exptoken)) + exptoken
    return tokenstruct


def execute_query():
    access_token = get_token()
    print(access_token)
    sql_server_name = "db-server"
    sql_server_db = "database_name"
    SQL_COPT_SS_ACCESS_TOKEN = 1256
    connString = f"Driver={{ODBC Driver 17 for SQL Server}};SERVER={sql_server_name}.database.windows.net;DATABASE={sql_server_db}"
    conn = pyodbc.connect(connString, attrs_before={SQL_COPT_SS_ACCESS_TOKEN: access_token})
    cursor = conn.cursor()
    cursor.execute("SELECT * from SYSOBJECTS")
    row = cursor.fetchone()
    while row:
        print(row)
        row = cursor.fetchone()
This works perfectly. I've also noticed the following:
If I take the access token from the Node version (printed by console.log) and pass it to the Python code in place of access_token, I get the same error from Python (Login failed for user '').
If I take the access token from JavaScript and substitute it for token.token in get_token (so it still goes through the padding and packing), it works perfectly.
So I'm guessing the binary padding and packing that the Python code does also needs to happen for the Node code to work. Is there some way of doing this? Or is there a better way to pass an access token from azure-identity to tedious?
Doh... I was using node-mssql, which is an abandoned 0.0.1 library. Switching to mssql (v6.3.1), which uses a recent version of tedious, makes the access token work directly.
Related
I have written code for Locust load testing for my use case, where I make a token call and then make feature calls, as per the code below.
This works fine with a single token and the number of users specified on the master.
I make the token call outside the class and pass it via an environment variable. The user class reads that single token and uses it for all users.
I do not want to make the token call inside the class, as that would generate a new token on every execution.
I'm looking for a way to make token calls based on the number of users specified with the master's -u option and use only those tokens in the User class.
Please suggest any documentation pointers I can refer to for this use case.
#! /usr/bin/python3.6
import json
import os
from datetime import datetime

import requests
from locust import HttpUser, task, constant, tag, events
from locust.log import setup_logging

setup_logging("INFO", None)


@events.init_command_line_parser.add_listener
def init_parser(parser):
    parser.add_argument("--resp-delay", type=str, env_var="LOCUST_RESP_DELAY", default="", help="It's working")
    parser.add_argument("--resp-size", type=str, env_var="LOCUST_RESP_SIZE", default="", help="It's working")
    parser.add_argument("--env-endpoint", type=str, env_var="LOCUST_ENV_ENDPOINT", default="", help="It's working")


@events.init.add_listener
def _(environment, **kw):
    os.environ['resp-delay'] = environment.parsed_options.resp_delay
    os.environ['resp-size'] = environment.parsed_options.resp_size
    os.environ['env-endpoint'] = environment.parsed_options.env_endpoint

    with open("resources/data/" + environment.parsed_options.env_endpoint + '/data.json') as f:
        data = json.load(f)
    cal_transaction_id = datetime.now().strftime('%Y%m%dT%H%M%S')

    # retrieve client id and client secret from bitbucket repo
    dict_car_app_all = data["data"]
    print("env-endpoint:" + os.environ.get("env-endpoint"))

    token_url = "https://ENDPOINT/auth/token"
    token_payload = "client_id=" + dict_car_app_all[0]["client_id"] + "&client_secret=" + dict_car_app_all[0]["client_secret"]
    token_headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    response = requests.request("POST", token_url, data=token_payload, headers=token_headers,
                                cert=('resources/certs/' + environment.parsed_options.env_endpoint + '/MyCERT.pem',
                                      'resources/certs/' + environment.parsed_options.env_endpoint + '/MYCERT.key'))
    result = json.loads(response.text)
    token = result["access_token"]
    os.environ['access_token'] = token
    os.environ['cal_transaction_id'] = cal_transaction_id


# class User_1(User):
class User_0(HttpUser):
    wait_time = constant(1)
    host = "host.com"

    @tag('tag1')
    @task
    def load_test_api_tag1(self):
        token_0 = os.environ.get('access_token')
        cal_transaction_id = os.environ.get('cal_transaction_id')
        env_endpoint = os.environ.get('env-endpoint')
        resp_delay = os.environ.get("resp-delay")
        resp_size = os.environ.get("resp-size")

        feature_headers = {
            'Authorization': "Bearer " + str(token_0),
            'sm_transactionID': cal_transaction_id
        }
        url = "https://ENDPOINT/SERVICE/mytestservice/first_test"
        querystring = {"response_delay": resp_delay, "data_size": resp_size}
        self.client.request("GET", url, headers=feature_headers, params=querystring,
                            cert=('resources/certs/' + env_endpoint + '/MyCERT.pem',
                                  'resources/certs/' + env_endpoint + '/MyCERT.key'))
You can generate tokens in the User class's on_start method, so each user generates a new token when it spawns.
class MyUser(User):
    def on_start(self):
        # generate the token here and assign it to an instance variable, e.g. self.token = abc
        super().on_start()
There is a drawback to this, though: if your user count is more than your token-generating service can handle, some users will not be able to spawn. The way I do it in my tests, when the token-generating part is not part of the system I am testing, is to generate the tokens beforehand, write them to a file or an external database, and read them from there (a minimal sketch of that approach follows).
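This is only an illustration; the tokens.txt file name and its one-token-per-line format are assumptions, not part of the original setup:

from itertools import cycle

from locust import HttpUser, task, constant

# Tokens are generated ahead of the test run and stored one per line in
# tokens.txt (hypothetical file); each spawned user picks the next one.
with open("tokens.txt") as f:
    TOKENS = cycle([line.strip() for line in f if line.strip()])


class TokenUser(HttpUser):
    wait_time = constant(1)
    host = "https://host.com"

    def on_start(self):
        # Assign a pre-generated token to this user instance.
        self.token = next(TOKENS)

    @task
    def call_feature(self):
        self.client.get("/SERVICE/mytestservice/first_test",
                        headers={"Authorization": "Bearer " + self.token})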
I am successfully able to authorize my application with a third-party OAuth2 provider (Xero), but have been unable to refresh the token, either automatically or manually.
The documentation suggests Authlib can do this automatically. I have tried two different approaches from the Authlib documentation: the Flask client docs give an example of "Auto Update Token via Signal", and the web client docs register an "update_token" function.
With either approach, no attempt is ever made to refresh the token: the request is passed to Xero with the expired token, I receive an error, and the only way to continue is to manually re-authorize the application with Xero.
Here is the relevant code for the "update_token" method from the web client docs:
# this never ends up getting called.
def save_xero_token(name, token, refresh_token=None, access_token=None, tenant_id=None):
    logging.info('Called save xero token.')
    # removed irrelevant code that stores token in NDB here.


cache = Cache()
oauth = OAuth(app, cache=cache)
oauth.register(
    name='xero',
    client_id=Meta.xero_consumer_client_id,
    client_secret=Meta.xero_consumer_secret,
    access_token_url='https://identity.xero.com/connect/token',
    authorize_url='https://login.xero.com/identity/connect/authorize',
    fetch_token=fetch_xero_token,
    update_token=save_xero_token,
    client_kwargs={'scope': ' '.join(Meta.xero_oauth_scopes)},
)

xero_tenant_id = 'abcd-123-placeholder-for-stackoverflow'
url = 'https://api.xero.com/api.xro/2.0/Invoices/ABCD-123-PLACEHOLDER-FOR-STACKOVERFLOW'
headers = {'Xero-tenant-id': xero_tenant_id, 'Accept': 'application/json'}
response = oauth.xero.get(url, headers=headers)  # works fine until the token is expired.
I am storing my token in the following NDB model:
class OAuth2Token(ndb.Model):
    name = ndb.StringProperty()
    token_type = ndb.StringProperty()
    access_token = ndb.StringProperty()
    refresh_token = ndb.StringProperty()
    expires_at = ndb.IntegerProperty()
    xero_tenant_id = ndb.StringProperty()

    def to_token(self):
        return dict(
            access_token=self.access_token,
            token_type=self.token_type,
            refresh_token=self.refresh_token,
            expires_at=self.expires_at
        )
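The fetch_xero_token function registered with Authlib above isn't shown in the question; as a rough sketch only (assuming one stored token per provider name, and swallowing any extra arguments the integration may pass), it might look like:

def fetch_xero_token(name, *args, **kwargs):
    # Hypothetical loader: return the stored token as the plain dict Authlib expects.
    record = OAuth2Token.query(OAuth2Token.name == name).get()
    return record.to_token() if record else None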
For completeness, here's how I store the initial response from Xero (which works fine):
@app.route('/XeroOAuthRedirect')
def xeroOAuthLanding():
    token = oauth.xero.authorize_access_token()
    connections_response = oauth.xero.get('https://api.xero.com/connections')
    connections = connections_response.json()
    for tenant in connections:
        print('saving first org, this app currently supports one xero org only.')
        save_xero_token('xero', token, tenant_id=tenant['tenantId'])
    return 'Authorized application with Xero'
How can I get automatic refreshing to work, and how can I manually trigger a refresh request when using the flask client, in the event automatic refreshing fails?
I believe I've found the problem here, and the root of it was the passing of a Cache (for temporary credential storage) when initializing OAuth:
cache = Cache()
oauth = OAuth(app,cache=cache)
When the cache is passed, it appears to preempt the update_token (and possibly fetch_token) parameters.
It should be simply:
oauth = OAuth(app)
oauth.register(
    name='xero',
    client_id=Meta.xero_consumer_client_id,
    client_secret=Meta.xero_consumer_secret,
    access_token_url='https://identity.xero.com/connect/token',
    authorize_url='https://login.xero.com/identity/connect/authorize',
    fetch_token=fetch_xero_token,
    update_token=save_xero_token,
    client_kwargs={'scope': ' '.join(Meta.xero_oauth_scopes)},
)
In addition, the parameters of my save_xero_token function needed to be adjusted to match the documentation (see the sketch below); however, this was not relevant to the original problem the question was addressing.
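For reference, the update_token signature shown in the Authlib web client documentation looks like this; the body is only a placeholder, not the actual fix:

def save_xero_token(name, token, refresh_token=None, access_token=None):
    # Placeholder: look up the stored token (by name plus refresh_token or
    # access_token) and overwrite it with the values from `token`.
    pass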
I would like to read my secret from a pod with Python.
I tried this:
import os
import hvac
f = open('/var/run/secrets/kubernetes.io/serviceaccount/token')
jwt = f.read()
client = hvac.Client()
client = hvac.Client(url='https://vault.mydomain.internal')
client.auth_kubernetes("default", jwt)
print(client.read('secret/pippo/pluto'))
I'm sure that secret/pippo/pluto exists.
I'm sure that I'm properly authenticated.
But I always receive None in response to my print.
Where can I look to solve this?
Thanks a lot.
If you read a KV value from Vault, you need the mount point and the path.
Example:
vault_client.secrets.kv.v1.read_secret(
    path=path,
    mount_point=mount_point
)
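For the specific path in the question (secret/pippo/pluto), if the secret/ mount is KV version 2 (the default for new mounts on recent Vault versions), a read might look like this sketch; the mount point / path split is an assumption about how the secrets engine is configured:

import hvac

client = hvac.Client(url='https://vault.mydomain.internal')
# authenticate first, e.g. client.auth_kubernetes("default", jwt) as in the question

# KV v2: mount point is "secret", path is "pippo/pluto";
# the secret's key/value pairs are nested under data.data.
result = client.secrets.kv.v2.read_secret_version(path='pippo/pluto', mount_point='secret')
print(result['data']['data'])

# KV v1 equivalent:
# result = client.secrets.kv.v1.read_secret(path='pippo/pluto', mount_point='secret')
# print(result['data'])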
I've tried the method you provided in my k8s Python 3 pod, and I can get the Vault secret data successfully.
You need to specify the correct Vault token parameter in your hvac.Client and disable the client.auth_kubernetes method.
Give it a shot, and remember your code should run in the k8s Python container rather than on your host machine.
import hvac
f = open('/var/run/secrets/kubernetes.io/serviceaccount/token')
jwt = f.read()
print("jwt:", jwt)
f.close()
client = hvac.Client(url='http://vault:8200', token='your_vault_token')
# res = client.auth_kubernetes("envelope-creator", jwt)
res = client.is_authenticated()
print("res:", res)
hvac_secrets_data_k8s = client.read('secret/data/compliance')
print("hvac_secrets_data_k8s:", hvac_secrets_data_k8s)
Below is the result:
92:qfedu shawn$ docker exec -it 202a119367a4 bash
airflow#airflow-858d8c6fcf-bgmwn:~$ ls
airflow-webserver.pid airflow.cfg config dags logs test_valut_in_webserver.py unittests.cfg webserver_config.py
airflow#airflow-858d8c6fcf-bgmwn:~$ python test_valut_in_webserver.py
jwt: eyJhbGciOiJSUzI1NiIsImtpZCI6IiJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia
res: True
hvac_secrets_data_k8s: {'request_id': '80caf0cb-8c12-12d2-6517-530eecebd1e0', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': {'data': {'s3AccessKey': 'XXXX', 's3AccessKeyId': 'XXXX', 'sftpPassword': 'XXXX', 'sftpUser': 'XXXX'}, 'metadata': {'created_time': '2020-02-07T14:04:26.7986128Z', 'deletion_time': '', 'destroyed': False, 'version': 4}}, 'wrap_info': None, 'warnings': None, 'auth': None}
As @shawn mentioned above, the commands below work for me as well:
import hvac
vault_url = 'https://<vault url>:8200/'
vault_token = '<vault token>'
ca_path = '/run/secrets/kubernetes.io/serviceaccount/ca.crt'
secret_path = '<secret path in vault>'
client = hvac.Client(url=vault_url, token=vault_token, verify=ca_path)
client.is_authenticated()
read_secret_result = client.read(secret_path)
print(read_secret_result)
print(read_secret_result['data']['username'])
print(read_secret_result['data']['password'])
Note: ca_path is where the pod stores the k8s CA; usually it can be found under "/run/secrets/kubernetes.io/serviceaccount/ca.crt".
I found it easier to use hvac for authentication and then use the API directly.
You can skip this step and use the root/dev token for testing:
import getpass

import hvac as h

client = h.Client(url='https://<vault url>:8200/')
username = input("username")
password = getpass.getpass()
# Log in with the userpass auth method (assumes it is enabled); this sets client.token.
client.auth.userpass.login(username=username, password=password)
print(client.token)
del username, password
Get the list of mounts
import json

import requests

vault_url = 'https://<vault url>:8200/'
vault_token = '<vault token>'
headers = {
    'X-Vault-Token': vault_token
}
response = requests.get(vault_url + 'v1/sys/mounts', headers=headers)
json.loads(response.text).keys()  # The ones ending with / are your mount names
Then get the password (you have to create one first):
mount = '<mount name>'
secret = '<secret name>'
response = requests.get(vault_url+'v1/'+mount+'/'+secret, headers=headers)
response.text
For the username/password login to get access to a secret created by root, you have to add the path in the JSON under Policies; a sketch of the userpass login against the raw API follows.
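This sketch uses the default auth/userpass mount; myuser and mypassword are placeholders:

import requests

vault_url = 'https://<vault url>:8200/'

# Log in with userpass; Vault returns the client token under auth.client_token.
login_resp = requests.post(vault_url + 'v1/auth/userpass/login/myuser',
                           json={'password': 'mypassword'})
client_token = login_resp.json()['auth']['client_token']

# Use that token exactly like the token in the requests above.
headers = {'X-Vault-Token': client_token}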
I am working on a project with the following architecture:
UI: React on the client with server-side rendering via a Node server, and Apollo Client for GraphQL.
API: Django handles GraphQL queries through Graphene.
I use Auth0 (JWT-based) for my frontend authentication. I would like to use the token I get to authenticate my user on the API side, in the context of the GraphQL queries.
[Edit2]
To pass the token to my API, I use:
const idToken = cookie.load('idToken') || null;

networkInterface.use([{
    applyMiddleware(req, next) {
        if (!req.options.headers) {
            req.options.headers = {}; // Create the header object if needed.
        }
        req.options.headers.authorization = `Bearer ${idToken}`;
        next();
    }
}]);
Then I need to retrieve it in Django: I use django-jwt-auth and the code proposed by @Craig Ambrose.
My authorization header is received and decoded (I can get the payload) but there is a problem when verifying the signature: I get "Error decoding signature."
This is strange since the signature is verified when I test it on jwt.io.
How can I authenticate on the Django side?
I've just done this using django-jwt-auth (not using Auth0).
That package provides a JSONWebTokenAuthMixin that you can combine with the GraphQLView from graphene_django, for example:
from jwt_auth.mixins import JSONWebTokenAuthMixin


class AuthGraphQLView(JSONWebTokenAuthMixin, GraphQLView):
    pass


urlpatterns = [
    url(r'^graphql', csrf_exempt(AuthGraphQLView.as_view(schema=schema))),
    url(r'^graphiql', include('django_graphiql.urls')),
]
This works, but I found that GraphiQL stopped working because it wasn't sending the token. I wanted to keep using cookie-based auth for that, for dev purposes, so I changed it to the following.
from jwt_auth.mixins import JSONWebTokenAuthMixin


class OptionalJWTMixin(JSONWebTokenAuthMixin):
    def dispatch(self, request, *args, **kwargs):
        auth = get_authorization_header(request)
        if auth:
            return super(OptionalJWTMixin, self).dispatch(request, *args, **kwargs)
        else:
            return super(JSONWebTokenAuthMixin, self).dispatch(request, *args, **kwargs)


class AuthGraphQLView(OptionalJWTMixin, GraphQLView):
    pass


urlpatterns = [
    url(r'^graphql', csrf_exempt(AuthGraphQLView.as_view(schema=schema))),
    url(r'^graphiql', include('django_graphiql.urls')),
]
My setup is working now:
I have used the code from @Craig Ambrose with django-jwt-auth. I had to fork the package on GitHub to handle the audience ('aud') claim present in the Auth0 token.
def jwt_get_user_id_from_payload_handler(payload):
    sub = payload.get('sub')
    Auth0User = import_string('project.models.Auth0User')
    auth0_user = Auth0User.objects.filter(auth0_id=sub)[0]
    user_id = auth0_user.user.id
    return user_id

JWT_PAYLOAD_GET_USER_ID_HANDLER = jwt_get_user_id_from_payload_handler

auth0_key = '<MyAuth0SECRET>'
JWT_SECRET_KEY = base64.b64decode(auth0_key.replace("_", "/").replace("-", "+"))
JWT_VERIFY = True
JWT_AUTH_HEADER_PREFIX = 'Bearer'
JWT_AUDIENCE = '<MyAuth0CLIENT_ID>'
Where Auth0User is a model with a OneToOne relation to the classic Django user and a field holding the auth0_id; a rough sketch follows.
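A minimal sketch of that model (any detail beyond the auth0_id field and the OneToOne link is an assumption):

from django.conf import settings
from django.db import models


class Auth0User(models.Model):
    # Auth0 "sub" identifier, matched in the payload handler above.
    auth0_id = models.CharField(max_length=64, unique=True)
    # One-to-one link to the classic Django user.
    user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)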
I query Google Calendar from a program written in Python 2.7, in server-to-server mode (using a certificate). Typically like this:
import httplib2
import oauth2client.client
import apiclient.discovery

with open('mycertfromgoogleconsole.p12', 'rb') as f:
    private_key = f.read()

credentials = oauth2client.client.SignedJwtAssertionCredentials(
    'something@developer.gserviceaccount.com',
    private_key,
    'https://www.googleapis.com/auth/calendar.readonly',
    sub='theaccounttoimpersonate@example.com'
)
http_auth = credentials.authorize(httplib2.Http())
service = apiclient.discovery.build('calendar', 'v3', http=http_auth)
I now need to port the script to Python 3.4 but cannot find a library which supports the backend (server-to-server) version of OAuth; there are several which seem to support the web version. Do you know if one is available? (Ideally a drop-in replacement, but that would be close to miraculous.)
I finally managed to make it work. The code below follows the Google API documentation, and the last request yields the token data mentioned in "Handling the response" in the docs linked above.
import requests
import jwt
import arrow
import OpenSSL.crypto


class GetToken(object):
    """
    Get a Google API token as documented at
    https://developers.google.com/identity/protocols/OAuth2ServiceAccount
    """
    def __init__(self):
        self.expires = 0
        self.token = None

    def gettoken(self):
        now = arrow.now().timestamp
        # does the token exist and is it still valid? (within a 10 second margin)
        if now < (self.expires - 10) and self.token:
            return self.token
        self.expires = now + 3600
        claim = {
            "iss": "dev_account_from_google_console@developer.gserviceaccount.com",
            "scope": "https://www.googleapis.com/auth/calendar.readonly",
            "aud": "https://www.googleapis.com/oauth2/v3/token",
            "exp": self.expires,  # the assertion expires one hour from now
            "iat": now,
            "sub": "account_with_delegated_rights@example.com"
        }
        p12 = OpenSSL.crypto.load_pkcs12(
            open('certificate_from_google_api_console.p12', 'rb').read(), 'notasecret')
        private_key = OpenSSL.crypto.dump_privatekey(
            OpenSSL.crypto.FILETYPE_PEM, p12.get_privatekey()).decode('utf-8')
        myjwt = jwt.encode(claim, private_key, algorithm='RS256',
                           headers={"alg": "RS256", "typ": "JWT"}).decode('utf-8')
        data = {
            "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer",
            "assertion": myjwt
        }
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        r = requests.post("https://www.googleapis.com/oauth2/v3/token", data=data, headers=headers)
        result = r.json()
        print(result)
        # keep the access token so subsequent calls reuse it until it expires
        self.token = result["access_token"]
        return self.token


if __name__ == "__main__":
    t = GetToken()
    t.gettoken()
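As a usage sketch that goes beyond the original answer, the access token returned above can then be sent as a bearer token to the Calendar API; the primary-calendar events endpoint is just an example:

import requests

t = GetToken()
access_token = t.gettoken()

# List events from the impersonated account's primary calendar.
events = requests.get(
    "https://www.googleapis.com/calendar/v3/calendars/primary/events",
    headers={"Authorization": "Bearer " + access_token},
)
print(events.json())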