POST and PUT API calls through Tornado - python-3.x

I want to post and update data in a YAML file through a Tornado API call. Could you please give a code example? Here is what I have so far:
import tornado.web
import tornado.ioloop
import nest_asyncio
import json

class basicRequestHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world....")

if __name__ == "__main__":
    app = tornado.web.Application([(r"/", basicRequestHandler)])
    app.listen(8881)
    print("I'm listening on port 8881")
    tornado.ioloop.IOLoop.current().start()

I prepared this example for you:
from typing import Dict

import tornado.web
import tornado.ioloop
import yaml
from tornado.escape import json_decode

class BaseHandler(tornado.web.RequestHandler):
    yaml_filename = 'data.yaml'
    json_args: Dict

    async def prepare(self):
        self.json_args = json_decode(self.request.body) or {}

    def data_received(self, chunk):
        ...

class MainHandler(BaseHandler):
    def post(self):
        with open(self.yaml_filename, 'w') as file:
            # ...
            yaml.dump(self.json_args, file)

    def put(self):
        with open(self.yaml_filename) as file:
            yaml_data = yaml.full_load(file)
        # ...
        yaml_data.update(self.json_args)
        # ...
        with open(self.yaml_filename, 'w') as file:
            yaml.dump(yaml_data, file)

if __name__ == "__main__":
    app = tornado.web.Application([(r"/", MainHandler)])
    app.listen(8881)
    print("I'm listening on port 8881")
    tornado.ioloop.IOLoop.current().start()
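To exercise these handlers from a client, something like this should work (a sketch, assuming the server above is running locally on port 8881 and the requests package is installed):

import requests

# POST writes the JSON body to data.yaml (overwriting the file)
r = requests.post("http://localhost:8881/", json={"name": "demo", "version": 1})
print(r.status_code)

# PUT merges the JSON body into the existing data.yaml
r = requests.put("http://localhost:8881/", json={"version": 2, "owner": "me"})
print(r.status_code)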

Related

How to send ros2 messages from a websocket server to connected clients in tornado

I have a ros2 publisher script that sends custom messages from ros2 nodes. What I need to do is to have a subscriber (which is also my websocket server) listen to the messages that the publisher sends, convert them to a dictionary, and send them as JSON from the websocket server to a connected websocket client. I have already checked the rosbridge repo but I could not make it work: it doesn't have enough documentation and I am new to ROS.
I need something like this:
import rclpy
import sys
from rclpy.node import Node
import tornado.ioloop
import tornado.httpserver
import tornado.web
import tornado.websocket
import threading
from custom.msg import CustomMsg
from .convert import message_to_ordereddict

wss = []

class wsHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        print('Online')
        if self not in wss:
            wss.append(self)

    def on_close(self):
        print('Offline')
        if self in wss:
            wss.remove(self)

def wsSend(message):
    for ws in wss:
        ws.write_message(message)

class MinimalSubscriber(Node):
    def __init__(self):
        super().__init__('minimal_subscriber')
        self.subscription = self.create_subscription(CustomMsg, 'topic', self.CustomMsg_callback, 10)
        self.subscription  # prevent unused variable warning

    def CustomMsg_callback(self, msg):
        ws_message = message_to_ordereddict(msg)
        wsSend(ws_message)

if __name__ == "__main__":
    http_server = tornado.httpserver.HTTPServer(tornado.web.Application([(r"/", wsHandler)]))
    http_server.listen(8888)
    main_loop = tornado.ioloop.IOLoop.instance()
    # Start main loop
    main_loop.start()
So the callback function in the MinimalSubscriber class receives the ROS message, converts it to a dictionary, and sends it to the websocket client. I am a bit confused about how to make these two threads (ROS and websocket) communicate with each other.
I got a bit confused going through the threading, so I changed my approach and made it work using Tornado's periodic callback with rclpy's spin_once as the callback function. I am posting my solution as it might help people who have the same issue.
import queue
import rclpy
from rclpy.node import Node
import tornado.ioloop
import tornado.httpserver
import tornado.web
import tornado.websocket
from custom.msg import CustomMsg
from .convert import message_to_ordereddict

wss = []

class wsHandler(tornado.websocket.WebSocketHandler):
    @classmethod
    def route_urls(cls):
        return [(r'/', cls, {}), ]

    def open(self):
        print('Online')
        if self not in wss:
            wss.append(self)

    def on_close(self):
        print('Offline')
        if self in wss:
            wss.remove(self)

def make_app():
    myWebHandler = wsHandler.route_urls()
    return tornado.web.Application(myWebHandler)

message_queue = queue.Queue()

class MinimalSubscriber(Node):
    def __init__(self):
        super().__init__('minimal_subscriber')
        self.subscription = self.create_subscription(CustomMsg, 'topic', self.CustomMsg_callback, 10)
        self.subscription  # prevent unused variable warning

    def CustomMsg_callback(self, msg):
        msg_dict = message_to_ordereddict(msg)
        message_queue.put(msg_dict)

if __name__ == "__main__":
    rclpy.init()
    minimal_subscriber = MinimalSubscriber()

    def send_ros_to_clients():
        rclpy.spin_once(minimal_subscriber)
        my_msg = message_queue.get()
        for client in wss:
            client.write_message(my_msg)

    app = make_app()
    server = tornado.httpserver.HTTPServer(app)
    server.listen(8888)
    tornado.ioloop.PeriodicCallback(send_ros_to_clients, 1).start()
    tornado.ioloop.IOLoop.current().start()
    minimal_subscriber.destroy_node()
    rclpy.shutdown()
I also folded the wsSend function into send_ros_to_clients. Some might say that using a global queue is not the best practice, but I could not come up with another solution. I would appreciate any suggestions or corrections.
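One possible alternative to the global queue, if you ever go back to the threaded approach: IOLoop.add_callback is documented as safe to call from other threads, so the ROS callback could hand the converted message straight to the Tornado loop. A rough sketch, reusing CustomMsg, message_to_ordereddict and the wss list from the snippets above:

import threading
import rclpy
import tornado.ioloop

def start_ros_in_background(node, io_loop):
    # The subscription callback runs in the ROS thread; add_callback
    # schedules the websocket writes back on Tornado's IOLoop thread.
    def on_msg(msg):
        msg_dict = message_to_ordereddict(msg)
        io_loop.add_callback(lambda: [ws.write_message(msg_dict) for ws in wss])

    node.create_subscription(CustomMsg, 'topic', on_msg, 10)
    threading.Thread(target=rclpy.spin, args=(node,), daemon=True).start()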

Using python compiled protobuf pb2 as key and value serializer

I am trying to read data from a Kafka topic which has been serialized using Google's protobuf.
I compiled the proto files using protoc, which generated the pb2 files.
Now I am trying to use Faust to create a stream processor, but I can't find the correct way to use the pb2 files as key_serializer and value_serializer.
Here is what I have tried:
import faust
from proto.topic_pb2 import topic

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

schema = faust.Schema(
    ## key_type=topic.PK,
    ## value_type=topic,
    key_serializer=topic.PK,
    value_serializer=topic,
)

topic = app.topic(
    'topic',
    schema=schema
)

@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)

if __name__ == "__main__":
    app.main()
Does anybody have any idea how to use the pb2 files in the serializers?
Man, I was trying to do the same thing this past week. After struggling I finally got something working - not the best way, but it works well enough.
So initially I used this Python compiler: https://github.com/danielgtaylor/python-betterproto to generate the *.py files with dataclasses / type hinting.
Then I was able to create faust.Record classes dynamically by using a helper:
import abc
import inspect
from typing import Type

import betterproto
import faust

GENERATED_SUFFIX = "__FaustRecord_Auto"

def _import_relative_class(module: str, klass_name: str):
    resolved_import = __import__(module, fromlist=[klass_name])
    klass = getattr(resolved_import, klass_name)
    return klass

def _is_record(attype: Type):
    return (
        inspect.isclass(attype)
        and isinstance(attype, betterproto.Message)
        or isinstance(attype, abc.ABCMeta)
    )

def _build_record_annotations(klass: Type):
    annotations = {}
    for atname, attype in klass.__annotations__.items():
        if _is_record(attype):
            annotations[atname] = make_faust_record(attype)
        elif isinstance(attype, str):
            subklass = _import_relative_class(klass.__module__, attype)
            annotations[atname] = make_faust_record(subklass)
        else:
            annotations[atname] = attype
    return annotations

def make_faust_record(klass: Type):
    type_name = f"{klass.__name__}{GENERATED_SUFFIX}"
    record_type = type(type_name, (faust.Record, klass), {})
    record_type.__annotations__ = _build_record_annotations(klass)
    record_type._init_subclass()
    return record_type
Now you can use it like:
import faust
from proto.your_models import YourModel  # Import your generated proto here
from faust_converter import make_faust_record

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

model_record = make_faust_record(YourModel)

topic = app.topic(
    'topic',
    value_type=model_record
)

@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)

if __name__ == "__main__":
    app.main()
I was also experimenting with using Protobuf with Faust.
Below is a solution using Faust serializer codecs.
faust-protobuf https://github.com/hemantkashniyal/faust-protobuf
proto_serializer.py
from faust.serializers import codecs
from typing import Any
from google.protobuf import json_format
from google.protobuf.json_format import MessageToJson
from google.protobuf.json_format import MessageToDict
from google.protobuf import text_format
from google.protobuf.text_format import MessageToString
from google.protobuf.text_format import MessageToBytes

class ProtobufSerializer(codecs.Codec):
    def __init__(self, pb_type: Any):
        self.pb_type = pb_type
        super(self.__class__, self).__init__()

    def _dumps(self, pb: Any) -> bytes:
        return pb.SerializeToString()

    def _loads(self, s: bytes) -> Any:
        pb = self.pb_type()
        pb.ParseFromString(s)
        return pb
app.py
import faust
from google.protobuf.json_format import MessageToJson

from .proto.greetings_pb2 import Greeting
from .proto_serializer import ProtobufSerializer

app = faust.App(
    'faust-consumer',
    broker='kafka://',  # TODO: update kafka endpoint
    store="memory://",
    cache="memory://",
)

greetings_schema = faust.Schema(
    key_serializer=ProtobufSerializer(pb_type=Greeting),
    value_serializer=ProtobufSerializer(pb_type=Greeting),
)

topic = app.topic(
    'greetings',
    schema=greetings_schema
)

@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(MessageToJson(event))

@app.timer(5)
async def produce():
    for i in range(10):
        data = Greeting(hello="world", message=i)
        await consume.send(value=data)

if __name__ == "__main__":
    app.main()
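If you would rather refer to the codec by name, Faust also lets you register a codec instance globally and pass the name to the topic. Something along these lines should work (the 'proto_greeting' name is just an example):

from faust.serializers import codecs

codecs.register('proto_greeting', ProtobufSerializer(pb_type=Greeting))

topic = app.topic(
    'greetings',
    key_serializer='proto_greeting',
    value_serializer='proto_greeting',
)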
I was able to do it by creating a serializer class like so:
import faust
from abc import ABCMeta, abstractmethod
from google.protobuf.json_format import MessageToDict
from faust.serializers.codecs import Codec
from importlib import import_module

def get_proto(topic_name, only_pk=False):
    if not hasattr(get_proto, "topics"):
        setattr(get_proto, "topics", dict())
    get_proto.topics[topic_name] = import_module(
        "protodef.{}_pb2".format(topic_name)
    ).__getattribute__(topic_name.split(".")[-1])
    if only_pk:
        return getattr(get_proto, "topics").get(topic_name).PK
    else:
        return getattr(get_proto, "topics").get(topic_name)

class ProtoSerializer(Codec, metaclass=ABCMeta):
    @abstractmethod
    def only_key(self):
        ...

    def as_proto(self, topic_name):
        self._proto = get_proto(topic_name, self.only_key())
        return self

    def _loads(self, b):
        data = MessageToDict(
            self._proto.FromString(b),
            preserving_proto_field_name=True,
            including_default_value_fields=True,
        )
        # remove the key object from the unserialized message
        data.pop("key", None)
        return data

    def _dumps(self, o):
        # for deletes
        if not o:
            return None
        obj = self._proto()
        # add the key object to the message before serializing
        if hasattr(obj, "PK"):
            for k in obj.PK.DESCRIPTOR.fields_by_name.keys():
                if k not in o:
                    raise Exception(
                        "Invalid object `{}` for proto `{}`".format(o, self._proto)
                    )
                setattr(obj.key, k, o[k])
        for k, v in o.items():
            if hasattr(obj, k):
                setattr(obj, k, v)
            else:
                ghost.debug(
                    "Invalid value-attribute `%s` for proto `%s`", k, self._proto
                )
        return obj.SerializeToString()

class ProtoValue(ProtoSerializer):
    def only_key(self):
        return False

class ProtoKey(ProtoSerializer):
    def only_key(self):
        return True
and then use it as follows:
import faust
from utils.serializer import ProtoKey, ProtoValue

app = faust.App(
    'faust-consumer',
    broker='kafka://',
    store="memory://",
    cache="memory://",
)

topic = app.topic(
    'topic',
    key_serializer=ProtoKey().as_proto('topic'),
    value_serializer=ProtoValue().as_proto('topic')
)

@app.agent(topic)
async def consume(topic):
    async for event in topic:
        print(event)

if __name__ == "__main__":
    app.main()

Is there a problem with the setUp() function in my unit test cases?

I am writing unit test cases for my app.py file. I have created a setUp() function in my test file, but the moment I execute any test case, it throws an error like this:
============================= test session starts ==============================
platform linux -- Python 3.6.9, pytest-6.1.1, py-1.9.0, pluggy-0.13.1 -- /home/curiousguy/PycharmProjects/leadgen/venv/bin/python
cachedir: .pytest_cache
rootdir: /home/curiousguy/PycharmProjects/leadgen/tests
plugins: cov-2.10.1
collecting ... ENV :None
test_app.py:None (test_app.py)
test_app.py:5: in <module>
from app import app
../app.py:7: in <module>
from appli import appli
../appli.py:6: in <module>
appli = create_app(db, config_name)
../app_setup.py:21: in create_app
leadgen_app.config.from_object(app_config[config_name])
E KeyError: None
collected 0 items / 1 error
==================================== ERRORS ====================================
_________________________ ERROR collecting test_app.py _________________________
test_app.py:5: in <module>
from app import app
../app.py:7: in <module>
from appli import appli
../appli.py:6: in <module>
appli = create_app(db, config_name)
../app_setup.py:21: in create_app
leadgen_app.config.from_object(app_config[config_name])
E KeyError: None
The test file is:
import unittest
from unittest.mock import patch
import pytest
# from appli import appli
from app import app
# import app

class MyApp(unittest.TestCase):
    def setUp(self):
        app.testing = True
        self.client = app.test_client()

    def tearDown(self):
        pass

    def test_settings_passed(self):
        response = self.client.get('/settings', follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        with pytest.raises(AssertionError) as wt:
            self.assertEqual(response.status_code, 403)
Since the errors point to different files, I am adding those files as well.
app.py
import functools
import pickle
from flask import (redirect, render_template, request, Response, session, url_for)
from flask_login import LoginManager, login_user, current_user
from flask_admin import Admin
from ldap3 import Server, Connection, ALL, SIMPLE
from appli import appli
import datetime
from modules.controller import application
from modules.users_view import MyAdminIndexView, UsersView
from database.db_model import (Companies, Users, Leads, db)
###########################################################
# Init section #
###########################################################
app = appli
login_manager = LoginManager()
login_manager.login_view = "login"
login_manager.init_app(app)
server = Server(app.config['LDAP_SERVER'],
                port=app.config['LDAP_PORT'], get_info=ALL)
server_tor = Server(app.config['LDAP_SERVER_TOR'],
                    port=app.config['LDAP_PORT'], get_info=ALL)
admin = Admin(app, name='LEADGEN Admin', index_view=MyAdminIndexView(), base_template='master.html')
admin.add_view(UsersView(Users, db.session))
application_inst = application("Lead Generator")
#rest of code
appli.py
import os
from app_setup import create_app
from database.db_model import db
config_name = os.getenv('FLASK_ENV')
appli = create_app(db, config_name)
if __name__ == '__main__':
    appli.run()
app_setup.py
def create_app(db, config_name):
    leadgen_app = Flask(__name__, instance_relative_config=True)
    # config_name = os.getenv('FLASK_ENV', 'default')
    print('ENV :' + str(config_name))
    # leadgen_app.config.from_object(eval(settings[config_name]))
    leadgen_app.config.from_object(app_config[config_name])
    leadgen_app.config.from_pyfile('config.cfg', silent=True)

    # Configure logging
    leadgen_app.logger.setLevel(leadgen_app.config['LOGGING_LEVEL'])
    handler = logging.FileHandler(leadgen_app.config['LOGGING_LOCATION'])
    handler.setLevel(leadgen_app.config['LOGGING_LEVEL'])
    formatter = logging.Formatter(leadgen_app.config['LOGGING_FORMAT'])
    handler.setFormatter(formatter)
    leadgen_app.logger.addHandler(handler)
    leadgen_app.logger.propagate = 0

    # Configure sqlalchemy
    leadgen_app.app_context().push()
    db.init_app(leadgen_app)
    # with leadgen_app.app_context():
    #     db.create_all()

    from leads.leads_bp import leads_bp
    from process.process_bp import process_bp
    from CAAPI.caapi_bp import caAPI_bp

    leadgen_app.register_blueprint(leads_bp)
    leadgen_app.register_blueprint(process_bp)
    leadgen_app.register_blueprint(caAPI_bp)
    return leadgen_app
Where am I making a mistake that prevents my test case from running successfully?
The KeyError is caused by app_config[config_name].
config_name comes from config_name = os.getenv('FLASK_ENV').
getenv defaults to None when no value is set; see https://docs.python.org/3/library/os.html#os.getenv.
This means you have to set the environment variable in order to make tests pass.
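For example, setting it before the app module is imported avoids the KeyError ('testing' here is only an assumption; use whichever key actually exists in your app_config dict):

# at the very top of the test file, before `from app import app`
import os
os.environ.setdefault('FLASK_ENV', 'testing')  # assumed to be a key in app_config

from app import app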
You could also debug your application with pdb - I gave a lightning talk on how to debug a Flask application:
https://www.youtube.com/watch?v=Fxkco-gS4S8&ab_channel=PythonIreland
So I figured out an answer to this. Here is what I did.
I made changes to the setUp() function in my test case file:
def setUp(self):
    self.app = create_app(db)
    self.client = self.app.test_client(self)
    with self.app.app_context():
        # create all tables
        db.create_all()
Then I added from app_setup import create_app in my test case file. Finally, I changed the function signature in app_setup.py to
def create_app(db, config_name='default')
and my test cases are now running.

How to start another thread from the request-handling thread with Flask?

First of all, I have tried looking for answers on this website, but no luck...
What I want to achieve is to start an independent thread from the request-handling thread to do some asynchronous task. The tricky point is that some database operations are needed in this independent thread.
Here is an example. Five files are included.
project
|__manager.py
|__config.py
|__deployer
   |__ __init__.py
   |__models.py
   |__views.py
   |__operators.py
Detailed code below...
# deployer/__init__.py
from flask import Flask
from deployer.models import db

def create_app(object_name):
    app = Flask(__name__)
    app.config.from_object(object_name)
    db.init_app(app)

    # Add route for index
    @app.route('/')
    def index():
        return {'code': 200, 'message': 'OK'}

    return app
# manager.py
from os import environ
from flask_script import Manager, Server
from deployer import create_app
from flask_restful import Api
from deployer.views import HostView

env = environ.get('APM_ENV', 'dev')
app = create_app('config.%sConfig' % env.capitalize())
api = Api(app)
api.add_resource(HostView, '/api/v1/hosts')

manager = Manager(app)
manager.add_command("server", Server(host='0.0.0.0', port=9527))

if __name__ == '__main__':
    manager.run(default_command='server')
# deployer/views.py
from flask_restful import Resource, reqparse
from flask import jsonify
from deployer.models import db, Host
from deployer.operators import HostInitiator

parser = reqparse.RequestParser()
parser.add_argument('host', type=int, help='Specify an unique host.')

class HostView(Resource):
    def get(self):
        h = db.session.query(Host).filter(Host.id == 1).one()
        return jsonify(
            host_id=h.id,
            host_code=h.code,
            host_ip=h.ip_addr_v4
        )

    def post(self):
        h = Host(
            code='Harbor',
            ip_addr_v4='10.10.10.199',
            state='created'
        )
        db.session.add(h)
        db.session.commit()

        initiator = HostInitiator(host=h)
        initiator.start()

        return {
            'code': 'Harbor',
            'ip_addr_v4': '10.10.10.199',
            'state': 'created'
        }
# deployer/models.py
from sqlalchemy import Column, Integer, String
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

class Host(db.Model):
    __tablename__ = 'br_host'
    id = Column(Integer, primary_key=True, autoincrement=True)
    code = Column(String(128), index=True, nullable=False)
    ip_addr_v4 = Column(String(15), nullable=False)
    state = Column(String(16), nullable=False)
# deployer/operators.py
from threading import Thread
from deployer.models import db, Host

class HostInitiator(Thread):
    def __init__(self, host):
        super().__init__()
        self.host = host

    def run(self):
        # Update Host.state [created --> initiating]
        db.session.query(Host).filter(Host.id == self.host.id).update({'state': 'initiating'})
        db.session.commit()
        # do some initiating things...
        # Update Host.state [initiating --> ready]
        db.session.query(Host).filter(Host.id == self.host.id).update({'state': 'ready'})
        db.session.commit()
With the code above I always get a "working outside of application context" error. The error message indicates that no database operation is permitted in the HostInitiator thread.
It suggests that I push a context or move my code into a view function. I have been struggling with this for quite a while; please help out if you have any suggestions. Thanks in advance.
This code works for me:
def test_multi_threading_query():
    # module in which I create the Flask app instance
    from app.main import app
    # module in which I create the sqlalchemy instance
    from app.model.db import db, Post
    with app.app_context():
        posts = Post.query.all()
        p = posts[0]
        p.foo = 1
        db.session.add(p)
        db.session.commit()
        print(p)

@api.route('/test')
def test_view():
    from threading import Thread
    t = Thread(target=test_multi_threading_query)
    t.start()
    return ''

# main.py
app = Flask(__name__)

# db.py
db = SQLAlchemy()

class Post(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    foo = db.Column(db.Integer)
https://flask.palletsprojects.com/en/1.1.x/appcontext/
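Applying the same idea to the HostInitiator thread from the question would look roughly like this. This is only a sketch: it assumes the Flask app object is handed to the thread (for example via current_app._get_current_object() in the view), and it passes the host id rather than the session-bound Host instance so no ORM object is shared across threads.

# deployer/operators.py (sketch)
from threading import Thread
from deployer.models import db, Host

class HostInitiator(Thread):
    def __init__(self, app, host_id):
        super().__init__()
        self.app = app          # the Flask app created by create_app()
        self.host_id = host_id  # pass the primary key, not the ORM object

    def run(self):
        # Push an application context so SQLAlchemy can use the configured
        # engine/session inside this worker thread.
        with self.app.app_context():
            db.session.query(Host).filter(Host.id == self.host_id).update({'state': 'initiating'})
            db.session.commit()
            # ... do the initiating work ...
            db.session.query(Host).filter(Host.id == self.host_id).update({'state': 'ready'})
            db.session.commit()

# In the view:
#   from flask import current_app
#   initiator = HostInitiator(app=current_app._get_current_object(), host_id=h.id)
#   initiator.start()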

Importing a variable from the main file into a class

I have two files. One is the main Python file, where I am using Flask and initializing a variable called cache using Flask-Cache:
from flask import *
from flask_compress import Compress
from flask_cors import CORS
from local_service_config import ServiceConfiguration
from service_handlers.organization_handler import *
import configparser
import argparse
import os
import redis
import ast
from flask_cache import Cache

app = Flask(__name__)
config = None
configured_service_handlers = {}
app.app_config = None
ug = None

@app.route('/organizations', methods=['POST', 'GET'])
@app.route('/organizations/<id>', methods=['DELETE', 'GET', 'PUT'])
def organizations(id=None):
    try:
        pass
    except Exception as e:
        print(e)

def load_configuration():
    global config
    configfile = "jsonapi.cfg"  # same dir as this file
    parser = argparse.ArgumentParser(
        description='Interceptor for UG and backend services.')
    parser.add_argument('--config', required=True, help='name of config file')
    args = parser.parse_args()
    configfile = args.config
    print("Using {} as config file".format(configfile))
    config = configparser.ConfigParser()
    config.read(configfile)
    return config

if __name__ == "__main__":
    config = load_configuration()
    serviceconfig = ServiceConfiguration(config)
    serviceconfig.setup_services()
    ug = serviceconfig.ug
    cache = Cache(app, config={
        'CACHE_TYPE': 'redis',
        'CACHE_KEY_PREFIX': 'fcache',
        'CACHE_REDIS_HOST': '{}'.format(config.get('redis', 'host')),
        'CACHE_REDIS_PORT': '{}'.format(config.get('redis', 'port')),
        'CACHE_REDIS_URL': 'redis://{}:{}'.format(
            config.get('redis', 'host'),
            config.get('redis', 'port')
        )
    })
    # configure app
    port = 5065
    if config.has_option('service', 'port'):
        port = config.get('service', 'port')
    host = '0.0.0.0'
    if config.has_option('service', 'host'):
        host = config.get('service', 'host')
    app.config["port"] = port
    app.config["host"] = host
    app.config["APPLICATION_ROOT"] = 'app'
    app.run(port=port, host=host)
And there is one more handler file, which has a class:
class OrganizationHandler(UsergridHandler):
    def __init__(self, config, test_ug=None):
        super(OrganizationHandler, self).__init__(config, ug=test_ug)

    @cache.memoize(60)
    def get_all_children_for_org(self, parent, all):
        try:
            temp = []
            children = self.ug.collect_entities(
                "/organizations/{}/connecting/parent/organizations".format(parent)
            )
            if not len(children):
                return
            for x in children:
                temp.append(x['uuid'])
            all += temp
            for each in temp:
                self.get_all_children_for_org(each, all)
            return all
        except Exception as e:
            print(e)
I want to import the cache variable defined in the main file so it can be used as @cache.memoize inside the class. How do I import that variable into the class's module?
You can create your Cache instance in a separate module (fcache.py):
from flask_cache import Cache
cache = Cache()
After that you can configure it in the main file:
from flask import Flask
from fcache import cache
app = Flask(__name__)
cache.init_app(app, config={'CACHE_TYPE': 'redis'})
The Cache instance can then be imported in other modules:
from fcache import cache

@cache.memoize(60)
def get_value():
    return 'Value'
This approach can also be used with other Flask extensions, like Flask-SQLAlchemy.
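For comparison, the same pattern with Flask-SQLAlchemy might look like this (the module names extensions.py / main.py / models.py are only for illustration):

# extensions.py
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()

# main.py
from flask import Flask
from extensions import db

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
db.init_app(app)

# models.py (or any other module)
from extensions import db

class Item(db.Model):
    id = db.Column(db.Integer, primary_key=True)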
