Google App Engine request log including severity - python-3.x

I am trying to implement the method in this Stack Overflow answer to get my logs grouped by request, with the highest severity level of the child logs shown on the request.
This is what I got so far:
custom_logger.py:
import inspect
import json
import os
from flask import g, request
from google.cloud import logging as gcp_logging
from google.cloud.logging.resource import Resource
LOG_LEVELS = ('DEFAULT', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
resource = Resource(type='gae_app',
                    labels={'project_id': os.environ['GOOGLE_CLOUD_PROJECT'],
                            'module_id': os.environ['GAE_SERVICE'],
                            'version_id': os.environ['GAE_VERSION']})
client = gcp_logging.Client()
custom_logger = client.logger('custom_logger')
request_logger = client.logger('request_logger')
def log_request(response):
    trace_id = get_trace_id()
    severity = LOG_LEVELS[g.get('log_level', 0)]
    request_info = {
        'requestMethod': request.method,
        'requestUrl': request.url,
        'status': response.status_code,
        'userAgent': request.headers.get('USER-AGENT'),
        'responseSize': response.content_length,
        'latency': g.request_duration(),
        'remoteIp': request.remote_addr
    }
    if request.method == 'POST':
        # request.json is a property, not a callable; get_json(silent=True) returns None on failure
        payload = request.get_json(silent=True) or json.loads(request.data.decode())
    else:
        payload = {}
    request_logger.log_struct(payload,
                              trace=trace_id,
                              http_request=request_info,
                              severity=severity)
def default(text):
    _log(text)

def log(text):
    return default(text)

def debug(text, *args):
    _log(text, *args)

def info(text, *args):
    _log(text, *args)

def warning(text, *args):
    _log(text, *args)

def warn(text, *args):
    return warning(text, *args)

def error(text, *args):
    _log(text, *args)

def critical(text, *args):
    _log(text, *args)

def _log(text, *args):
    trace_id = get_trace_id()
    severity = inspect.stack()[1][3].upper()
    new_level = LOG_LEVELS.index(severity)
    previous_level = g.get('log_level', 0)
    g.log_level = max(previous_level, new_level)
    message = text % args
    custom_logger.log_text(message, resource=resource,
                           severity=severity, trace=trace_id)

def get_trace_id():
    return (f"projects/{os.environ['GOOGLE_CLOUD_PROJECT']}/traces/"
            f"{request.headers['X-Cloud-Trace-Context'].split('/')[0]}")
main.py:
import json
import time
from flask import Flask, g, request, make_response
from flask_cors import CORS
import custom_logger as logging
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
@app.before_request
def setup_timing():
    g.request_start_time = time.time()
    g.request_duration = lambda: f'{(time.time() - g.request_start_time):.5f}s'

@app.after_request
def log_request(response):
    logging.log_request(response)
    return response

@app.route('/', methods=['GET', 'OPTIONS'])
def _test():
    logging.debug('DEBUG %s', 'TEST')
    logging.info('INFO %s', 'TEST')
    logging.warning('WARNING %s', 'TEST')
    logging.error('ERROR %s', 'TEST')
    logging.critical('CRITICAL %s', 'TEST')
    return 'Success'
It seems like the request_logger.log_struct call does not result in any entry being added to the logs. If I add a request_logger.log_text call before the request_logger.log_struct call, that text entry does end up in the logs. Why do I not see the results of request_logger.log_struct in the logs?

After adding the "resource" property to the log_struct call in the log_request function I can see the requests in the "Logs viewer". The logs are now grouped and the highest severity level is added to the parent log entry.
request_logger.log_struct(payload,
                          resource=resource,
                          trace=trace_id,
                          http_request=request_info,
                          severity=severity)

Related

Store HTTP and websocket updates in parallel

I want to receive HTTP updates and WS updates in parallel.
For this I use threading, but I still only receive updates from the WS. It looks like
http_req_update is blocked by the infinite stream of messages from on_message.
Could someone please help me with this?
import json
import time
from websocket import WebSocketApp
import requests
import threading
class BithumbWebSocketApp(WebSocketApp):
    def __init__(self, url, **kwargs):
        super(BithumbWebSocketApp, self).__init__(url, **kwargs)

    def _request_orderbookdepth(self, channel, event=None, payload=None, auth_required=True):
        current_time = int(time.time())
        data = {
            "time": current_time,
            "type": "orderbookdepth",
            "symbols": ["BTC_KRW"],
        }
        data = json.dumps(data)
        print('request1', data)
        self.send(data)

    def subscribe(self, channel, payload=None, auth_required=True):
        self._request_orderbookdepth(channel, "subscribe", payload, auth_required)

    def unsubscribe(self, channel, payload=None, auth_required=True):
        self._request_orderbookdepth(channel, "unsubscribe", payload, auth_required)

def on_open(ws):
    print('Connected')
    ws.subscribe("wss://pubwss.bithumb.com/pub/ws", "BTC_KRW", False)

msg_lst = []

def on_message(ws, message):
    print('message', message)
    msg = json.loads(message.encode('utf-8'))
    print('msg1: ', msg)
    msg_lst.append({
        "msg": msg,
        "type": msg['type'],
        "list": msg['content']['list'],
        "datetime": msg['content']['datetime'],
    })
    lst_to_json = json.dumps(msg_lst)

def ws_update():
    app = BithumbWebSocketApp("wss://pubwss.bithumb.com/pub/ws",
                              on_open=on_open,
                              on_message=on_message)
    app.run_forever(ping_interval=5)

def http_req_update():
    currency = 'BTC_KRW'  # ALL
    url = f"https://api.bithumb.com/public/orderbook/{currency}"
    headers = {
        "accept": "application/json",
        "content-type": "application/json"
    }
    response = requests.get(url, headers=headers)
    print('snapshot_response', response.text)

if __name__ == "__main__":
    trd1 = threading.Thread(target=ws_update)
    trd2 = threading.Thread(target=http_req_update)
    trd1.start()
    trd2.start()
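One thing worth noting: as written, http_req_update performs a single GET and then its thread exits, so after that only the websocket keeps producing output; it is not actually blocked by on_message. Below is a minimal sketch of a polling loop that keeps fetching the REST snapshot alongside the websocket thread. The 5-second interval and the stop_event flag are assumptions for illustration, not part of the original code:

import threading
import time

import requests

stop_event = threading.Event()  # hypothetical flag used to stop the polling loop

def http_req_update_loop(interval: float = 5.0):
    """Poll the public orderbook endpoint repeatedly so HTTP updates
    keep arriving while the websocket thread runs in parallel."""
    url = "https://api.bithumb.com/public/orderbook/BTC_KRW"
    headers = {"accept": "application/json"}
    while not stop_event.is_set():
        try:
            response = requests.get(url, headers=headers, timeout=10)
            print('snapshot_response', response.text)
        except requests.RequestException as exc:
            print('snapshot request failed:', exc)
        time.sleep(interval)

# usage alongside the websocket thread from the question:
# trd1 = threading.Thread(target=ws_update, daemon=True)
# trd2 = threading.Thread(target=http_req_update_loop, daemon=True)
# trd1.start(); trd2.start()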

How to test GET and POST APIs which require a logged-in user with pytest in Django?

I have created a fixture in conftest.py to create a user and log them in automatically:
@pytest.fixture
def test_password():
    return 'strong-test-pass'

@pytest.fixture(scope='session')
def create_user(db, test_password):
    def make_user(**kwargs):
        employee = e_ge_employee.objects.create()
        kwargs['password'] = test_password
        if 'username' not in kwargs:
            kwargs['username'] = str(uuid.uuid4())
        if 'employee' not in kwargs:
            kwargs['employee'] = employee
        return e_ge_user.objects.create(**kwargs)
    return make_user

@pytest.fixture
def auto_login_user(db, client, create_user, test_password):
    def make_auto_login(user=None):
        if user is None:
            user = create_user()
        client.login(username=user.username, password=test_password)
        return client, user
    return make_auto_login
and then wrote a test case in test_urls.py to check the GET API:
class TestUrls:
    @pytest.mark.parametrize('param', [
        ('generate_token'),
        ('roi_report')
    ])
    def test_generate_token_url(self, auto_login_user, param):
        url = reverse(param)
        client, user = auto_login_user()
        print("client", client)
        print("user", user)
        resp = client.get(url)
        print(resp)
        assert resp.status_code == 200
It is giving me the error:
{"detail":"Authentication credentials were not provided."}
In my API views I am using these two classes: authentication_classes = [SessionAuthentication, ]
and permission_classes = [IsAuthenticated, ].
Can someone please provide me with a solution? Thanks in advance.
Try rewriting your auto_login_user as follows to force_authenticate the user.
First, import APIClient from rest_framework.test:
# import
from rest_framework.test import APIClient

@pytest.fixture
def auto_login_user(db, create_user):
    api_client = APIClient()
    # create_user is a factory fixture, so call it to get the user instance
    api_client.force_authenticate(user=create_user())
    return api_client
and now update your test as follows
def test_generate_token_url(self, auto_login_user, create_user, param):
    url = reverse(param)
    user = create_user()
    resp = auto_login_user.get(url)
    print(resp)
    assert resp.status_code == 200
If this doesn't work, the problem may be with the create_user fixture.
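If you want to keep the factory shape of the original fixture (so a test can still get both the client and the user back), a sketch along these lines should also work with DRF's APIClient. It assumes pytest-django's db fixture and the create_user factory from the question's conftest.py; the fixture name auto_api_client is made up for illustration:

import pytest
from rest_framework.test import APIClient

@pytest.fixture
def auto_api_client(db, create_user):
    """Return a factory that builds a force-authenticated APIClient
    plus the user it was authenticated as."""
    def make_client(**user_kwargs):
        user = create_user(**user_kwargs)          # factory fixture from conftest.py
        api_client = APIClient()
        api_client.force_authenticate(user=user)   # bypasses the SessionAuthentication login step
        return api_client, user
    return make_client

# usage in a test:
# def test_roi_report(auto_api_client):
#     api_client, user = auto_api_client()
#     resp = api_client.get(reverse('roi_report'))
#     assert resp.status_code == 200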

Chrome makes a blocking request

I made a minimal example that completely reproduces the behavior of my code. I make requests from Firefox and Chrome. I noticed that after making a request from Chrome, Firefox stops receiving answers. After some research, I realized that the server responds with an error to a request for the localhost:8000/favicon.ico path. After receiving the 404 error once, Chrome creates another connection to the server after each response, but does not send any data, which causes a lock on the recv call:
File "/usr/lib/python3.7/socket.py", line 589, in readinto
return self._sock.recv_into(b)
I found that I can set a connection timeout on the handler class; it is taken into account in StreamRequestHandler:
if self.timeout is not None:
    self.connection.settimeout(self.timeout)
But I am puzzled that there is no information about this in the documentation:
https://docs.python.org/3/library/socketserver.html#socketserver.BaseRequestHandler.handle
import logging
import json
import http.server
from http import HTTPStatus
from typing import Optional
from urllib.parse import urlparse, parse_qs
import socketserver
from threading import Thread
import traceback
from functools import wraps
import sys, os
project_dir = os.path.abspath(os.curdir)
sys.path.append(project_dir)
logging.getLogger().setLevel("DEBUG")
class RESTHandler(http.server.BaseHTTPRequestHandler):
    """
    Rest router for api methods
    """
    def __init__(self, *args, **kwargs):
        logging.info(f"Creating RESTHandler obj. Args: {args}, kwargs: {kwargs}")
        super().__init__(*args, **kwargs)

    def end_headers(self) -> None:
        self.send_header('Access-Control-Allow-Origin', '*')
        http.server.BaseHTTPRequestHandler.end_headers(self)

    # noinspection PyPep8Naming
    def do_GET(self):
        logging.info(self.path)
        url = urlparse(self.path)
        if "favicon.ico" in url.path:
            self.send_error(HTTPStatus.NOT_FOUND, message='Unknown api path.')
            return
        self.send_response(HTTPStatus.OK)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        self.wfile.write(json.dumps({"resp": "I am OK", "int": 5}, ensure_ascii=False).encode('utf-8'))

class ApiService():
    DEFAULT_API_PORT = 8000
    DEFAULT_API_HOST = ''

    def __init__(self, ui_service=None, host: Optional[str] = None, port: Optional[int] = None):
        self.ui_service = ui_service
        self.host = host or self.DEFAULT_API_HOST
        self.port = port or self.DEFAULT_API_PORT

    def _run(self):
        while True:
            try:
                with socketserver.TCPServer((self.host, self.port), RESTHandler, bind_and_activate=False) as httpd:
                    logging.info("Starting server....")
                    httpd.allow_reuse_address = True
                    httpd.server_bind()
                    httpd.server_activate()
                    logging.info(f"Serving API at {self.host}:{self.port}")
                    httpd.serve_forever()
                    break
            except Exception as e:
                tb_list = traceback.format_exception(type(e), e, tb=e.__traceback__)
                tb_list = [s.replace("\n", "") for s in tb_list]
                tb_str = "; ".join(tb_list)
                logging.error(f"Unexpected exception while http-server was working: {tb_str}")

    def run(self, in_thread=True):
        if in_thread:
            t = Thread(target=self._run)
            t.start()
        else:
            self._run()

if __name__ == '__main__':
    ApiService().run(in_thread=False)
I guess Chrome uses pre-opened sockets, on which TCPServer would wait indefinitely in my case. But I am still interested in why this happens only after the 404, and whether using the request handler's timeout is a legitimate fix.
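For what it's worth, the timeout mentioned above can be set as a class attribute on the handler: BaseHTTPRequestHandler inherits from socketserver.StreamRequestHandler, whose setup() applies self.timeout to the accepted socket, and handle_one_request() treats a socket timeout as "discard this connection". A minimal sketch of that approach, where the 5-second value is an arbitrary assumption:

import http.server
import json
import socketserver
from http import HTTPStatus

class TimeoutRESTHandler(http.server.BaseHTTPRequestHandler):
    # StreamRequestHandler.setup() calls self.connection.settimeout(self.timeout)
    # when this is not None, so an idle speculative connection from Chrome
    # no longer blocks recv() forever; it is dropped after 5 seconds.
    timeout = 5  # seconds; value chosen for illustration

    def do_GET(self):
        self.send_response(HTTPStatus.OK)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        self.wfile.write(json.dumps({"resp": "I am OK"}).encode('utf-8'))

if __name__ == '__main__':
    with socketserver.TCPServer(('', 8000), TimeoutRESTHandler) as httpd:
        httpd.serve_forever()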

Error from callback function in python websocket: line 346, in _callback callback(self, *args)

I used the Gemini websocket code for a BTC feed, but a callback error keeps popping up. However, it occasionally does run the code correctly. I have used 'price' and 'quantity' to test the code before I use the actual variables I want.
(Screenshot: error from running my Gemini websocket code)
Here is the code:
import websocket, ssl, json
import _thread as thread
websocket._logging._logger.level = -99
bestbid = {}
bestask = {}
top_of_book = 0
def on_message(self, message):
    global bestbid, bestask
    bestbid = json.loads(message)
    bestask = json.loads(message)
    print("Message received!")
    print("{} {}".format(bestbid["price"], bestask["quantity"]))

def on_error(self, error):
    print(error)

def on_close(self):
    print("Connection closed!")

def on_open(self):
    print("Connection opened!")
    def run(*args):
        ws.send(logon_msg)
    thread.start_new_thread(run, ())

if __name__ == "__main__":
    logon_msg = '{"type": "subscribe","subscriptions":[{"name":"l2","symbols":["BTCUSD"]}]}'
    websocket.enableTrace(True)
    ws = websocket.WebSocketApp("wss://api.gemini.com/v2/marketdata/",
                                on_message=on_message,
                                on_error=on_error,
                                on_close=on_close,
                                on_open=on_open)
    ws.on_open = on_open
    ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE})
You should check that the bestbid/bestask JSON object actually has the "price" and "quantity" fields. The errors are most probably caused by heartbeat or other diagnostic messages.
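A sketch of the guard suggested above: skip any message that lacks the fields the callback indexes, so heartbeats and other control messages no longer raise a KeyError inside the callback. The field names simply mirror the question's test code and are not checked against the current Gemini API docs:

def on_message(self, message):
    global bestbid, bestask
    msg = json.loads(message)

    # Heartbeats and diagnostic messages do not carry price data,
    # so skip anything without the fields we index below.
    if not isinstance(msg, dict) or "price" not in msg or "quantity" not in msg:
        print("Skipping non-trade message:", msg.get("type", "unknown") if isinstance(msg, dict) else msg)
        return

    bestbid = msg
    bestask = msg
    print("Message received!")
    print("{} {}".format(bestbid["price"], bestask["quantity"]))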

Origin http://localhost:8000 is not allowed by Access-Control-Allow-Origin when sending a request to Flask

I am sending a GET request from localhost:8000 to Flask:
$(document).ready(function() {
    $('#test').click(function() {
        $.getJSON("http://localhost:5000/test/", function() {
        }).done(function() {
            document.location = "http://localhost:5000";
        }).fail(function() {
            console.log("server not ready.");
        });
    });
});
and in "server.py" I am handling GET like:
from flask import jsonify
import app

@server.route('/test/', methods=['GET'])
def test():
    print("in test status check")
    return jsonify({'status': 'OK'})
However I am getting this error:
XMLHttpRequest cannot load http://127.0.0.1:5000/test/. Origin http://127.0.0.1:8000 is not allowed by Access-Control-Allow-Origin.
In Flask you can create a custom decorator to control the Access-Control-Allow-Origin policy. This snippet may help you: http://flask.pocoo.org/snippets/56/
Code from the article:
from datetime import timedelta
from flask import make_response, request, current_app
from functools import update_wrapper

def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
And here is how you can use it:
@app.route('/my_service')
@crossdomain(origin='*')
def my_service():
    return jsonify(foo='cross domain ftw')
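As an alternative to the hand-rolled decorator (which is written for Python 2, note the basestring usage), the flask_cors extension used in the first question can set the same headers. A minimal sketch, assuming the Flask app object is named server to match the route decorator in the question:

from flask import Flask, jsonify
from flask_cors import CORS

server = Flask(__name__)
# Allow cross-origin requests from the page served on localhost:8000.
CORS(server, origins=["http://localhost:8000", "http://127.0.0.1:8000"])

@server.route('/test/', methods=['GET'])
def test():
    return jsonify({'status': 'OK'})

if __name__ == '__main__':
    server.run(port=5000)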
