Creating a CustomConnector connection (GoogleAds) using AWS Lambda | AppFlow

Does the boto3 client support connectors for GoogleAds and FacebookAds? According to the documentation we can use a Custom Connector, but when I try to use it in the code I get the error below saying it should be one of the built-in types.
[ERROR] ParamValidationError: Parameter validation failed:
Unknown parameter in connectorProfileConfig.connectorProfileProperties: "CustomConnector", must be one of: Amplitude, Datadog, Dynatrace, GoogleAnalytics, Honeycode, InforNexus, Marketo, Redshift, Salesforce, ServiceNow, Singular, Slack, Snowflake, Trendmicro, Veeva, Zendesk, SAPOData
Unknown parameter in connectorProfileConfig.connectorProfileCredentials: "CustomConnector", must be one of: Amplitude, Datadog, Dynatrace, GoogleAnalytics, Honeycode, InforNexus, Marketo, Redshift, Salesforce, ServiceNow, Singular, Slack, Snowflake, Trendmicro, Veeva, Zendesk, SAPOData
Traceback (most recent call last):
  File "/var/task/lambda_function.py", line 34, in lambda_handler
    response = client.create_connector_profile(
  File "/var/runtime/botocore/client.py", line 391, in _api_call
    return self._make_api_call(operation_name, kwargs)
  File "/var/runtime/botocore/client.py", line 691, in _make_api_call
    request_dict = self._convert_to_request_dict(
  File "/var/runtime/botocore/client.py", line 739, in _convert_to_request_dict
    request_dict = self._serializer.serialize_to_request(
  File "/var/runtime/botocore/validate.py", line 360, in serialize_to_request
    raise ParamValidationError(report=report.generate_report())
Code in Lambda:
import json
import boto3

def lambda_handler(event, context):
    client = boto3.client('appflow')

    ### Google Ads
    response = client.create_connector_profile(
        connectorProfileName='GoogleAdsConn',
        connectorType='CustomConnector',
        # connectorLabel='GoogleAds',
        connectionMode='Public',
        connectorProfileConfig={
            "connectorProfileProperties": {
                'CustomConnector': {
                    # 'profileProperties': {
                    #     'string': 'string'
                    # },
                    'oAuth2Properties': {
                        'tokenUrl': 'https://oauth2.googleapis.com/token',
                        'oAuth2GrantType': 'AUTHORIZATION_CODE'
                        # ,'tokenUrlCustomProperties': {
                        #     'string': 'string'
                        # }
                    }
                }
            },
            "connectorProfileCredentials": {
                "CustomConnector": {
                    "authenticationType": "OAUTH2",
                    "oauth2": {
                        "accessToken": "myaccesstoken",
                        "clientId": "myclientid",
                        "clientSecret": "myclientsecret",
                        "oAuthRequest": {
                            "authCode": "string",
                            "redirectUri": "string"
                        },
                        "refreshToken": "myrefreshtoken"
                    }
                }
            }
        }
    )

    return {
        'response': response
    }
Any leads on this will be appreciated.
Thanks!

The issue was with an older boto3 version. After adding a new Lambda layer with the latest boto3 version (1.24.70) and updating the code with profileProperties, it worked seamlessly. Below is the complete working code.
import json
import boto3

def lambda_handler(event, context):
    client = boto3.client('appflow')

    ### Google Ads
    response = client.create_connector_profile(
        connectorProfileName='GoogleAdsConnThruLambda',
        connectorType='CustomConnector',
        connectorLabel='GoogleAds',
        connectionMode='Public',
        connectorProfileConfig={
            "connectorProfileProperties": {
                'CustomConnector': {
                    'profileProperties': {
                        'developerToken': 'developerToken',
                        'instanceUrl': 'https://googleads.googleapis.com',
                        'managerID': 'managerID',
                        'apiVersion': 'v11'
                    },
                    'oAuth2Properties': {
                        'tokenUrl': 'https://oauth2.googleapis.com/token',
                        'oAuth2GrantType': 'AUTHORIZATION_CODE'
                        # ,'tokenUrlCustomProperties': {
                        #     "string": "string"
                        # }
                    }
                }
            },
            "connectorProfileCredentials": {
                "CustomConnector": {
                    "authenticationType": "OAUTH2",
                    "oauth2": {
                        "accessToken": "accessToken",
                        "clientId": "clientId",
                        "clientSecret": "clientSecret",
                        "oAuthRequest": {
                            "authCode": "authCode",
                            "redirectUri": "https://<your_region>.console.aws.amazon.com/appflow/oauth"
                        },
                        "refreshToken": "refreshToken"
                    }
                }
            }
        }
    )

    return {
        'response': response
    }
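To confirm that the new layer is actually the one the function runs with, a minimal sketch (not part of the original answer) that logs the runtime library versions at the top of the handler:
import boto3
import botocore

def lambda_handler(event, context):
    # Log the library versions to verify the Lambda layer with the newer boto3 is in effect.
    print(f"boto3 {boto3.__version__}, botocore {botocore.__version__}")
    ...
If the logged version is older than the one packaged in the layer, the layer is not attached to the function or is being shadowed by the runtime's bundled SDK.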

Related

Spark error class java.util.HashMap cannot be cast to class java.lang.String

I get the following error when I try to fetch data from Elasticsearch with Spark. The error doesn't say where the problem is.
The body2 query works in Dev Tools in Elasticsearch.
from pyspark import SparkContext
from pyspark.sql import SQLContext
from pyspark import SparkConf
from pyspark.sql import SparkSession

body2 = {
    "query": {
        "bool": {
            "must": [
                {
                    "range": {
                        "@timestamp": {
                            "lte": "2022-05-03T09:25:15.000-03:00",
                            "gte": "2022-05-04T09:25:15.000-03:00"
                        }
                    }
                },
                {
                    "match": {
                        "type.keyword": "TABLA"
                    }
                }
            ]
        }
    },
    "size": 10
}

es_read_conf = {
    "es.nodes": "10.45.15.93",
    "es.port": "9200",
    "es.query": body2,
    "es.nodes.wan.only": "true",
    "es.resource": "indice1/TABLA",
    "es.net.http.auth.user": "usuario1",
    "es.net.http.auth.pass": "rsl242442j"
}

es_rdd = sc.newAPIHadoopRDD(
    inputFormatClass="org.elasticsearch.hadoop.mr.EsInputFormat",
    keyClass="org.apache.hadoop.io.NullWritable",
    valueClass="org.elasticsearch.hadoop.mr.LinkedMapWritable",
    conf=es_read_conf)
Here is the error, and I don't know where in the code the problem is:
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/opt/spark/python/pyspark/context.py", line 859, in newAPIHadoopRDD
    jrdd = self._jvm.PythonRDD.newAPIHadoopRDD(self._jsc, inputFormatClass, keyClass,
  File "/opt/spark/python/lib/py4j-0.10.9.3-src.zip/py4j/java_gateway.py", line 1321, in __call__
  File "/opt/spark/python/pyspark/sql/utils.py", line 111, in deco
    return f(*a, **kw)
  File "/opt/spark/python/lib/py4j-0.10.9.3-src.zip/py4j/protocol.py", line 326, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.newAPIHadoopRDD.
: java.lang.ClassCastException: class java.util.HashMap cannot be cast to class java.lang.String (java.util.HashMap and java.lang.String are in module java.base of loader 'bootstrap')
    at org.apache.spark.api.python.PythonHadoopUtil$.$anonfun$mapToConf$1(PythonHadoopUtil.scala:160)
    at org.apache.spark.api.python.PythonHadoopUtil$.$anonfun$mapToConf$1$adapted(PythonHadoopUtil.scala:160)
    at scala.collection.Iterator.foreach(Iterator.scala:943)
    at scala.collection.Iterator.foreach$(Iterator.scala:943)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
    ...
I have reviewed the code and can't find the solution.
Thanks for all.
As the error message describes, the problem is that the query should be a string, not a dictionary:
body2="""{
"query": {
"bool": {
"must": [
{
"range": {
"#timestamp": {
"lte": "2022-05-03T09:25:15.000-03:00",
"gte": "2022-05-04T09:25:15.000-03:00"
}
}
},
{
"match": {
"type.keyword": "TABLA"
}
}
]
}
},
"size":10
}"""
You can see a reference for that here
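Alternatively, a minimal sketch (reusing the body2 dict and es_read_conf values from the question, not part of the original answer) that keeps the query as a Python dict and serializes it with json.dumps when building the configuration:
import json

body2 = {
    "query": {
        "bool": {
            "must": [
                {"range": {"@timestamp": {"lte": "2022-05-03T09:25:15.000-03:00",
                                          "gte": "2022-05-04T09:25:15.000-03:00"}}},
                {"match": {"type.keyword": "TABLA"}}
            ]
        }
    },
    "size": 10
}

es_read_conf = {
    "es.nodes": "10.45.15.93",
    "es.port": "9200",
    # es.query must be a string, so serialize the dict here.
    "es.query": json.dumps(body2),
    "es.nodes.wan.only": "true",
    "es.resource": "indice1/TABLA",
}
This avoids hand-maintaining a long string literal while still passing a string to the connector.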

Lambda error "TypeError: 'str' object does not support item assignment Traceback"

I am trying out this code:
import json
import uuid
import boto3

def lambda_handler(event, context):
    dynamo_client = boto3.resource('dynamodb')
    loadeo_carrier_company = dynamo_client.Table('loadeo_carrier_company')

    item = {}
    item = event['body']
    print(item)
    item['company_id'] = str(uuid.uuid4())
    print(type(item))

    try:
        loadeo_carrier_company.put_item(
            Item=item
        )
        return {
            "statusCode": 200,
            "headers": {
                "Access-Control-Allow-Origin": "*"
            },
            "message": json.dumps("Record has been inserted"),
            "body": item
        }
    except Exception as e:
        return {
            "statusCode": 500,
            "headers": {
                "Access-Control-Allow-Origin": "*"
            },
            "message": "Error: Unable to save record!"
        }
When I test it through a Lambda test event it works fine. But when I create an API and try it out with Postman, it shows an internal server error, and the CloudWatch logs show the error below.
[ERROR] TypeError: 'str' object does not support item assignment
Traceback (most recent call last):
  File "/var/task/lambda_function.py", line 19, in lambda_handler
    item['company_id'] = str(uuid.uuid4())
NOTE:
The Lambda test event is working fine.
"body": {
"company_name" : "waveaxis pvt ltd",
"mc_number" : "00000",
"email_adderess": "waveaxis#waveaxis.co.in",
"phone_number" : "+91 1234567890",
"company_address" : "Kormangala, Bengaluru"
},
After trying the answer:
When I test it from a Lambda test event, I get:
{
    "errorMessage": "the JSON object must be str, bytes or bytearray, not dict",
    "errorType": "TypeError",
    "stackTrace": [
        "  File \"/var/task/lambda_function.py\", line 17, in lambda_handler\n    item = json.loads(event['body'])\n",
        "  File \"/var/lang/lib/python3.8/json/__init__.py\", line 341, in loads\n    raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n"
    ]
}
and when I call it from Postman, execution stops before the try block with no error.
Edit 2: printing the JSON for the event:
{
"resource":"/",
"path":"/",
"httpMethod":"PUT",
"headers":{
"Accept":"*/*",
"Accept-Encoding":"gzip, deflate, br",
"Authorization":"Bearer eyJraWQiOiJCUUl6ZkxcL1VOdm9QTDVxNExlWGFRNXNxOG1mVmhmXC9rK3ZJUDdYc0p0VjQ9IiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJkYjdkODBmOC1mY2VhLTQwYjItYTZhYi1jMjhhNTZiMTI1NDIiLCJ0b2tlbl91c2UiOiJhY2Nlc3MiLCJzY29wZSI6ImNhcnJpZXJcL3JlYWQgY2FycmllclwvdXBkYXRlIGNhcnJpZXJcL2RlbGV0ZSIsImF1dGhfdGltZSI6MTYwMjA0NDQ5NywiaXNzIjoiaHR0cHM6XC9cL2NvZ25pdG8taWRwLnVzLWVhc3QtMS5hbWF6b25hd3MuY29tXC91cy1lYXN0LTFfWW5nVnJxYUFGIiwiZXhwIjoxNjAyMDQ4MDk3LCJpYXQiOjE2MDIwNDQ0OTcsInZlcnNpb24iOjIsImp0aSI6ImExMzg4ZGUyLTRhZWQtNGI2MC04YjM0LWYzN2I1N2RjM2ZmMiIsImNsaWVudF9pZCI6IjVlZzU1NWhpNzAwZ21lbWc3N3B0NWkzbDIyIiwidXNlcm5hbWUiOiJHb29nbGVfMTEyNjUyMTUzMDI4OTQyNjAzNDM5In0.XMy9GP03o5EYrcLtQFzrMV6KID4IlDI_n0WrHa8osY_7CeeDjaCjH6Dtr766TAommLUzcLoKt-NrBUdq0Zfx-BL919j25rwiZXJbHiZP_4y9n891ddOXfPabO7n8O84-63W6l13QEBozuc21vXi7vuE_dSJ7KAgute46KP3LyoS73WPDhYim_7HZJO8pVedk64hhGNZsYWv6VU5QeQyqPl926spA25ZBo_z5dcoBnMZ_i2n5nz6qxRcINOKfMXL1f4_nDRbtKb5Pd33hKnsKYLkxEI0mrT1JKPJhkJRg9vGqaKcd13oqrigJRFSXYuVQuKNDluc38KbQJcwUoXDjjA",
"Content-Type":"application/json",
"Host":"661ny3iw92.execute-api.us-east-2.amazonaws.com",
"Postman-Token":"654cfdd0-8080-48a5-8758-3aa7e5bcefcc",
"User-Agent":"PostmanRuntime/7.26.5",
"X-Amzn-Trace-Id":"Root=1-5fa13860-63da2718701bf3fc458c78e1",
"X-Forwarded-For":"157.49.141.105",
"X-Forwarded-Port":"443",
"X-Forwarded-Proto":"https"
},
"multiValueHeaders":{
"Accept":[
"*/*"
],
"Accept-Encoding":[
"gzip, deflate, br"
],
"Authorization":[
"Bearer eyJraWQiOiJCUUl6ZkxcL1VOdm9QTDVxNExlWGFRNXNxOG1mVmhmXC9rK3ZJUDdYc0p0VjQ9IiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJkYjdkODBmOC1mY2VhLTQwYjItYTZhYi1jMjhhNTZiMTI1NDIiLCJ0b2tlbl91c2UiOiJhY2Nlc3MiLCJzY29wZSI6ImNhcnJpZXJcL3JlYWQgY2FycmllclwvdXBkYXRlIGNhcnJpZXJcL2RlbGV0ZSIsImF1dGhfdGltZSI6MTYwMjA0NDQ5NywiaXNzIjoiaHR0cHM6XC9cL2NvZ25pdG8taWRwLnVzLWVhc3QtMS5hbWF6b25hd3MuY29tXC91cy1lYXN0LTFfWW5nVnJxYUFGIiwiZXhwIjoxNjAyMDQ4MDk3LCJpYXQiOjE2MDIwNDQ0OTcsInZlcnNpb24iOjIsImp0aSI6ImExMzg4ZGUyLTRhZWQtNGI2MC04YjM0LWYzN2I1N2RjM2ZmMiIsImNsaWVudF9pZCI6IjVlZzU1NWhpNzAwZ21lbWc3N3B0NWkzbDIyIiwidXNlcm5hbWUiOiJHb29nbGVfMTEyNjUyMTUzMDI4OTQyNjAzNDM5In0.XMy9GP03o5EYrcLtQFzrMV6KID4IlDI_n0WrHa8osY_7CeeDjaCjH6Dtr766TAommLUzcLoKt-NrBUdq0Zfx-BL919j25rwiZXJbHiZP_4y9n891ddOXfPabO7n8O84-63W6l13QEBozuc21vXi7vuE_dSJ7KAgute46KP3LyoS73WPDhYim_7HZJO8pVedk64hhGNZsYWv6VU5QeQyqPl926spA25ZBo_z5dcoBnMZ_i2n5nz6qxRcINOKfMXL1f4_nDRbtKb5Pd33hKnsKYLkxEI0mrT1JKPJhkJRg9vGqaKcd13oqrigJRFSXYuVQuKNDluc38KbQJcwUoXDjjA"
],
"Content-Type":[
"application/json"
],
"Host":[
"661ny3iw92.execute-api.us-east-2.amazonaws.com"
],
"Postman-Token":[
"654cfdd0-8080-48a5-8758-3aa7e5bcefcc"
],
"User-Agent":[
"PostmanRuntime/7.26.5"
],
"X-Amzn-Trace-Id":[
"Root=1-5fa13860-63da2718701bf3fc458c78e1"
],
"X-Forwarded-For":[
"157.49.141.105"
],
"X-Forwarded-Port":[
"443"
],
"X-Forwarded-Proto":[
"https"
]
},
"queryStringParameters":"None",
"multiValueQueryStringParameters":"None",
"pathParameters":"None",
"stageVariables":"None",
"requestContext":{
"resourceId":"529nsbfu6a",
"resourcePath":"/",
"httpMethod":"PUT",
"extendedRequestId":"VbW_FFkYCYcF3PQ=",
"requestTime":"03/Nov/2020:11:00:48 +0000",
"path":"/dev",
"accountId":"272075499248",
"protocol":"HTTP/1.1",
"stage":"dev",
"domainPrefix":"661ny3iw92",
"requestTimeEpoch":1604401248435,
"requestId":"970dd9d2-9b35-45c5-b194-806060e27d10",
"identity":{
"cognitoIdentityPoolId":"None",
"accountId":"None",
"cognitoIdentityId":"None",
"caller":"None",
"sourceIp":"157.49.141.105",
"principalOrgId":"None",
"accessKey":"None",
"cognitoAuthenticationType":"None",
"cognitoAuthenticationProvider":"None",
"userArn":"None",
"userAgent":"PostmanRuntime/7.26.5",
"user":"None"
},
"domainName":"661ny3iw92.execute-api.us-east-2.amazonaws.com",
"apiId":"661ny3iw92"
},
"body":"{\r\n \"company_name\": \"waveaxis pvt ltd\",\r\n \"mc_number\": \"00000\",\r\n \"email_adderess\": \"waveaxis#waveaxis.co.in\",\r\n \"phone_number\": \"+91 1234567890\",\r\n \"company_address\": \"Kormangala, Bengaluru\"\r\n}\r\n",
"isBase64Encoded":False
}
Your event['body'] is probably just a JSON string, not an actual JSON object, from what I remember.
Thus, instead of
item = event['body']
you can use
item = json.loads(event['body'])
which should parse the JSON string into a JSON object.
Update
Based on the posted form of the event, you should use ast, not json:
import ast
item = ast.literal_eval(event['body'])
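For completeness, a defensive sketch (a hypothetical helper, not from the original answer) that handles both of the asker's cases: the Lambda test event, where body is already a dict, and the API Gateway call, where body arrives as a string:
import json

def parse_body(event):
    # Return the request body as a dict, whether it arrives
    # as a dict (Lambda test event) or as a JSON string (API Gateway).
    body = event.get('body', {})
    if isinstance(body, (str, bytes)):
        return json.loads(body)
    return body
With such a helper, item = parse_body(event) works from both the console test event and Postman, assuming the body is valid JSON.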

Cannot create ResourceGroup using boto3: Query format not valid

I am trying to create a resource group using the following boto3 snippet:
import boto3

kwargs = {
    'Name': 'cluster.foo.io',
    'Description': 'AWS resources assigned to the foo cluster.',
    'ResourceQuery': {
        'Type': 'TAG_FILTERS_1_0',
        'Query': '[{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]',
    },
    'Tags': {
        'foo.io/cluster': 'cluster.foo.io'
    }
}

client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
But I'm getting the following error:
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/Users/benjamin/.pyenv/versions/3.7.3/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/client.py", line 661, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.errorfactory.BadRequestException: An error occurred (BadRequestException)
when calling the CreateGroup operation: Query not valid:
Query format not valid: check JSON syntax
I keep comparing the Query to the example in the documentation but either I'm not seeing a difference or I'm way off in left field. I even used the json module as follows:
resp = self.resource_client.create_group(
    Name='cluster.foo.io',
    Description="AWS resources assigned to the foo cluster",
    ResourceQuery={
        "Type": "TAG_FILTERS_1_0",
        "Query": json.dumps([{"Key": "foo.io/cluster", "Values": ["cluster.foo.io"]}]),
    },
    Tags={
        "foo.io/cluster": "cluster.foo.io",
    },
)
Any help would be appreciated!
The query parameter is missing ResourceTypeFilters and TagFilters. So, ResourceQuery should look like this:
'ResourceQuery': {
    'Type': 'TAG_FILTERS_1_0',
    'Query': "{\"ResourceTypeFilters\": [\"AWS::AllSupported\"], \"TagFilters\": [{\"Key\": \"foo.io/cluster\", \"Values\": [\"cluster.foo.io\"]}]}"
}
So, your code should be replaced as follows (the main section to be replaced is ResourceQuery):
import json
import boto3

query = {
    "ResourceTypeFilters": ["AWS::AllSupported"],
    "TagFilters": [{
        "Key": "foo.io/cluster",
        "Values": ["cluster.foo.io"]
    }]
}

resource_query = {
    'Type': 'TAG_FILTERS_1_0',
    'Query': json.dumps(query)
}

kwargs = {
    'Name': 'cluster.foo.io',
    'Description': 'AWS resources assigned to the foo cluster.',
    'ResourceQuery': resource_query,
    'Tags': {
        'foo.io/cluster': 'cluster.foo.io'
    }
}

client = boto3.client("resource-groups")
resp = client.create_group(**kwargs)
I referred to the CLI example shown here.

How to create kubernetes cluster in google cloud platform in python using google-cloud-container module

I'm trying to create a Kubernetes cluster on Google Cloud Platform through Python (3.7) using the google-cloud-container module.
I created a Kubernetes cluster through the Google Cloud Platform console and was able to successfully retrieve details for that cluster using google-cloud-container (the Python module).
I'm now trying to create a Kubernetes cluster through this module. I created a JSON file with the required key values and passed it as a parameter, but I'm getting errors. I would appreciate a sample code snippet for creating a Kubernetes cluster on Google Cloud Platform. Thank you in advance.
from google.oauth2 import service_account
from google.cloud import container_v1


class GoogleCloudKubernetesClient(object):

    def __init__(self, file, project_id, project_name, zone, cluster_id):
        credentials = service_account.Credentials.from_service_account_file(
            filename=file)
        self.client = container_v1.ClusterManagerClient(credentials=credentials)
        self.project_id = project_id
        self.zone = zone

    def create_cluster(self, cluster):
        print(cluster)
        response = self.client.create_cluster(self.project_id, self.zone, cluster=cluster)
        print(f"response for cluster creation: {response}")


def main():
    cluster_data = {
        "name": "test_cluster",
        "masterAuth": {
            "username": "admin",
            "clientCertificateConfig": {
                "issueClientCertificate": True
            }
        },
        "loggingService": "logging.googleapis.com",
        "monitoringService": "monitoring.googleapis.com",
        "network": "projects/abhinav-215/global/networks/default",
        "addonsConfig": {
            "httpLoadBalancing": {},
            "horizontalPodAutoscaling": {},
            "kubernetesDashboard": {
                "disabled": True
            },
            "istioConfig": {
                "disabled": True
            }
        },
        "subnetwork": "projects/abhinav-215/regions/us-west1/subnetworks/default",
        "nodePools": [
            {
                "name": "test-pool",
                "config": {
                    "machineType": "n1-standard-1",
                    "diskSizeGb": 100,
                    "oauthScopes": [
                        "https://www.googleapis.com/auth/cloud-platform"
                    ],
                    "imageType": "COS",
                    "labels": {
                        "App": "web"
                    },
                    "serviceAccount": "abhinav@abhinav-215.iam.gserviceaccount.com",
                    "diskType": "pd-standard"
                },
                "initialNodeCount": 3,
                "autoscaling": {},
                "management": {
                    "autoUpgrade": True,
                    "autoRepair": True
                },
                "version": "1.11.8-gke.6"
            }
        ],
        "locations": [
            "us-west1-a",
            "us-west1-b",
            "us-west1-c"
        ],
        "resourceLabels": {
            "stage": "dev"
        },
        "networkPolicy": {},
        "ipAllocationPolicy": {},
        "masterAuthorizedNetworksConfig": {},
        "maintenancePolicy": {
            "window": {
                "dailyMaintenanceWindow": {
                    "startTime": "02:00"
                }
            }
        },
        "privateClusterConfig": {},
        "databaseEncryption": {
            "state": "DECRYPTED"
        },
        "initialClusterVersion": "1.11.8-gke.6",
        "location": "us-west1-a"
    }

    kube = GoogleCloudKubernetesClient(file='/opt/key.json', project_id='abhinav-215', zone='us-west1-a')
    kube.create_cluster(cluster_data)


if __name__ == '__main__':
    main()
Actual Output:
Traceback (most recent call last):
  File "/opt/matilda_linux/matilda_linux_logtest/matilda_discovery/matilda_discovery/test/google_auth.py", line 118, in <module>
    main()
  File "/opt/matilda_linux/matilda_linux_logtest/matilda_discovery/matilda_discovery/test/google_auth.py", line 113, in main
    kube.create_cluster(cluster_data)
  File "/opt/matilda_linux/matilda_linux_logtest/matilda_discovery/matilda_discovery/test/google_auth.py", line 31, in create_cluster
    response = self.client.create_cluster(self.project_id, self.zone, cluster=cluster)
  File "/opt/matilda_discovery/venv/lib/python3.6/site-packages/google/cloud/container_v1/gapic/cluster_manager_client.py", line 407, in create_cluster
    project_id=project_id, zone=zone, cluster=cluster, parent=parent
ValueError: Protocol message Cluster has no "masterAuth" field.
A late answer, but I had the same problem and figured it out; worth writing down for future viewers.
You should not write the field names in cluster_data as they appear in the REST API.
Instead, translate them to the Python convention, with words separated by underscores instead of camelCase.
Thus, instead of writing masterAuth, you should write master_auth. Make similar changes to the rest of your fields, and the script should work (a conversion sketch follows below).
P.S. You aren't using the project_name and cluster_id params in GoogleCloudKubernetesClient.__init__. Not sure what they are for, but you should probably remove them.
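To illustrate, a hedged sketch (not part of the original answer; the helper name is hypothetical) that recursively converts an existing camelCase dict to snake_case keys before passing it to create_cluster:
import re

def to_snake_case(obj):
    # Recursively convert camelCase dict keys (REST style) to snake_case (protobuf/Python style).
    if isinstance(obj, dict):
        return {
            re.sub(r'(?<!^)(?=[A-Z])', '_', key).lower(): to_snake_case(value)
            for key, value in obj.items()
        }
    if isinstance(obj, list):
        return [to_snake_case(item) for item in obj]
    return obj

# e.g. {"masterAuth": {"clientCertificateConfig": {...}}} becomes
#      {"master_auth": {"client_certificate_config": {...}}}
Note that fields with unusual capitalization (such as managerID-style keys) may need manual adjustment, so review the converted dict before using it.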
The module still uses the basic REST API format to create the cluster. You can also use the GUI to choose all the options you want for your cluster, then press the REST hyperlink at the bottom of the page; this gives you the REST format required to build the cluster you want.
The error you are getting is because you have a blank (or unspecified) field that must be specified. Some of the fields listed in the API have default values that you don't need to set; others are required.
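As a minimal sketch (assuming the snake_case field names discussed above and the create_cluster call used in the question; the values are placeholders, not a verified configuration), a pared-down cluster spec might look like this:
minimal_cluster = {
    "name": "test-cluster",
    "initial_node_count": 3,
    "node_config": {
        "machine_type": "n1-standard-1",
        "disk_size_gb": 100,
    },
}

# Using the question's client wrapper:
# kube.create_cluster(minimal_cluster)
Starting from a minimal spec and adding fields one at a time makes it easier to find which field the API rejects.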

Logging RotatingFileHandler not rotating

I have implemented a custom RotatingFileHandler:
from logging.handlers import RotatingFileHandler

class FreezeAwareFileHandler(RotatingFileHandler):
    def emit(self, record):
        try:
            msg = self.format(record)
            stream = self.stream
            stream.write(msg)
            stream.write('\n')
            self.flush()
        except (KeyboardInterrupt, SystemExit):  # pragma: no cover
            raise
        except:
            self.handleError(record)
And I have this JSON configuration file (I have also tried YAML, and specifying the configuration through the class methods):
{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "standard": {
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S"
        }
    },
    "handlers": {
        "freeze_aware_file_handler": {
            "class": "Logging.MyHandler",
            "formatter": "standard",
            "level": "INFO",
            "filename": "logs\\MyLog.log",
            "maxBytes": 1024,
            "backupCount": 10,
            "encoding": "utf8"
        }
    },
    "loggers": {
        "my_module": {
            "level": "INFO",
            "handlers": ["my_handler"],
            "propagate": "no"
        }
    },
    "root": {
        "level": "INFO",
        "handlers": ["my_handler"]
    }
}
And this is the code I use for initializing:
if os.path.exists(path):
    with open(path, 'rt') as f:
        config_json = json.load(f)
    logging.config.dictConfig(config_json)

logger = logging.getLogger("my_handler")
I can log to the specified file normally, but it never rotates.
Does anyone know why I'm seeing this behavior?
I'm using Python 3.5.
It turned out the problem was really simple.
For the rotating file handler, the overridden emit needs to explicitly call the shouldRollover and doRollover methods inherited from the base class.
def emit(self, record):
    try:
        if self.shouldRollover(record):
            self.doRollover()
        ...
        # do any custom actions here
        ...
    except:
        self.handleError(record)
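Putting it together with the handler from the question, a hedged sketch of the fixed class (keeping the original write/flush logic, with the rollover check added first) might look like this:
from logging.handlers import RotatingFileHandler

class FreezeAwareFileHandler(RotatingFileHandler):
    def emit(self, record):
        try:
            # Rotate first, so the record is written to the fresh file if needed.
            if self.shouldRollover(record):
                self.doRollover()
            msg = self.format(record)
            stream = self.stream
            stream.write(msg)
            stream.write('\n')
            self.flush()
        except (KeyboardInterrupt, SystemExit):  # pragma: no cover
            raise
        except Exception:
            self.handleError(record)
With maxBytes set to 1024 as in the configuration above, this version rolls the log over once the file exceeds roughly 1 KB.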
