I am new to the Azure SubscriptionClient. I am trying to get the total message count for an Azure Service Bus subscription with Python.
Please try something like the following (this uses the older azure-servicebus 0.50 SDK):
from azure.servicebus import SubscriptionClient
conn_str = "Endpoint=sb://<service-bus-namespace-name>.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=access-key="
topic_name = "test"
subscription_name = "test"
client = SubscriptionClient.from_connection_string(conn_str, subscription_name, topic_name)
props = client.get_properties()
message_count = props['message_count']
print(message_count)
This worked for me with the newer azure-servicebus SDK (v7+), which replaces SubscriptionClient with a management client:
from azure.servicebus.management import ServiceBusAdministrationClient

CONNECTION_STR = "<your_connection_string>"
TOPIC_NAME = "<your_topic_name>"
SUBSCRIPTION_NAME = "<your_subscription_name>"

with ServiceBusAdministrationClient.from_connection_string(CONNECTION_STR) as servicebus_mgmt_client:
    runtime_properties = servicebus_mgmt_client.get_subscription_runtime_properties(TOPIC_NAME, SUBSCRIPTION_NAME)
    number_of_messages_in_subscription = runtime_properties.active_message_count
Source: https://github.com/Azure/azure-sdk-for-python/blob/1709ec7898c87e4369f5324302f274f254857dc3/sdk/servicebus/azure-servicebus/samples/async_samples/mgmt_subscription_async.py
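Note that active_message_count only counts messages available for delivery. If you also want dead-lettered messages, the runtime properties object exposes separate counters; a minimal sketch continuing from the code above (attribute names per the azure-servicebus management API):

# Hedged sketch: other counters exposed on SubscriptionRuntimeProperties.
print("Active:", runtime_properties.active_message_count)
print("Dead-lettered:", runtime_properties.dead_letter_message_count)
print("Total:", runtime_properties.total_message_count)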
I am running a function in a Jupyter notebook to test my connection to a database.
Here is the code:
from azure.identity import DefaultAzureCredential
from azure.appconfiguration import AzureAppConfigurationClient
from azure.keyvault.secrets import SecretClient
import requests
config_url = '#####'
credential = DefaultAzureCredential()
app_slug = 'XXXXX'
key = '*******'
label=None
key = '{}:{}'.format(app_slug, key)
config_client = AzureAppConfigurationClient(base_url=config_url, credential=credential)
fetched_config = config_client.get_configuration_setting(key=key, label=label)
fetched_config.value
It connects for a coworker but not for me. I am wondering if anyone has run into this problem before and could help.
The error seems to be in this line:
fetched_config = config_client.get_configuration_setting(key=key, label=label)
but I cannot understand why.
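Since the same code works for a coworker, the difference is most likely in which identity DefaultAzureCredential resolves to on each machine and what permissions that identity has on the store. A first diagnostic (a sketch, not a confirmed fix; the management scope below is used only as a generic token test) is to confirm the credential can obtain a token at all:

from azure.identity import DefaultAzureCredential

# Hedged diagnostic: if this raises, the problem is the local credential
# chain (environment variables, az login, VS Code sign-in, ...), not the
# get_configuration_setting call itself.
credential = DefaultAzureCredential()
token = credential.get_token("https://management.azure.com/.default")
print("Token acquired; expires at", token.expires_on)

If the token is acquired, check that your identity has a data-plane role on the App Configuration store (for example, App Configuration Data Reader), which your coworker's identity may have and yours may not.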
All,
I modified the sample Receive Python script for Azure Event Hubs a bit, but when I run it, it goes into a loop fetching the same events over and over. I'm not sending any events to the event hub, since I only want to read what is already there, and I don't see a while loop here. So how is this happening, and how do I stop after it reads all the events currently in the Event Hub?
Thanks
grajee
# https://learn.microsoft.com/en-us/python/api/overview/azure/eventhub-readme?view=azure-python#consume-events-from-an-event-hub
import logging

from azure.eventhub import EventHubConsumerClient

connection_str = 'Endpoint=sb://testhubns01.servicebus.windows.net/;SharedAccessKeyName=getevents;SharedAccessKey=testtestest='
consumer_group = '$Default'
eventhub_name = 'testpart'
client = EventHubConsumerClient.from_connection_string(connection_str, consumer_group, eventhub_name=eventhub_name)

logger = logging.getLogger("azure.eventhub")
logging.basicConfig(level=logging.INFO)

def on_event(partition_context, event):
    logger.info("Received event from partition: \"{}\": \"{}\"".format(partition_context.partition_id, event.body_as_str()))
    partition_context.update_checkpoint(event)

with client:
    client.receive(
        on_event=on_event,
        starting_position="-1",  # "-1" is from the beginning of the partition.
    )
    # To receive events from a specific partition:
    # client.receive(on_event=on_event, partition_id='0')
# The with block closes the client on exit, so the extra client.close() is not needed.
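For the "how do I stop" part: receive() blocks until the client is closed, which is why there is no visible while loop. To read for a bounded window and then exit, one pattern from the azure-eventhub README (a sketch; the 15-second window is an arbitrary choice) is to run receive() on a worker thread, instead of inside the with block above, and close the client from the main thread:

import time
import threading

# Hedged sketch: receive for a fixed window, then stop. Closing the client
# unblocks the receive() call running on the worker thread.
worker = threading.Thread(
    target=client.receive,
    kwargs={"on_event": on_event, "starting_position": "-1"},
)
worker.start()
time.sleep(15)
client.close()
worker.join()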
The piece of code below, which adds a blob checkpoint store, makes this clearer: with a checkpoint store configured, a restarted client resumes from the last checkpointed position instead of starting_position, so events that were already processed are not fetched again.
import asyncio

from azure.eventhub.aio import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblobaio import BlobCheckpointStore

connection_str = '<< CONNECTION STRING FOR THE EVENT HUBS NAMESPACE >>'
consumer_group = '<< CONSUMER GROUP >>'
eventhub_name = '<< NAME OF THE EVENT HUB >>'
storage_connection_str = '<< CONNECTION STRING FOR THE STORAGE >>'
container_name = '<< NAME OF THE BLOB CONTAINER >>'

async def on_event(partition_context, event):
    # do something with the event
    await partition_context.update_checkpoint(event)  # or update_checkpoint every N events for better performance

async def receive(client):
    await client.receive(
        on_event=on_event,
        starting_position="-1",  # "-1" is from the beginning of the partition.
    )

async def main():
    checkpoint_store = BlobCheckpointStore.from_connection_string(storage_connection_str, container_name)
    client = EventHubConsumerClient.from_connection_string(
        connection_str,
        consumer_group,
        eventhub_name=eventhub_name,
        checkpoint_store=checkpoint_store,  # for load balancing and checkpointing; leave as None for no load balancing
    )
    async with client:
        await receive(client)

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
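If you want to keep the synchronous client from the question, the same checkpoint store has a sync variant (a sketch, assuming the azure-eventhub-checkpointstoreblob package; the storage connection string and container name are placeholders):

from azure.eventhub import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblob import BlobCheckpointStore

# Hedged sketch: plug a blob checkpoint store into the sync client so a
# restarted receiver resumes from the last checkpoint instead of "-1".
checkpoint_store = BlobCheckpointStore.from_connection_string(
    '<< CONNECTION STRING FOR THE STORAGE >>',
    '<< NAME OF THE BLOB CONTAINER >>',
)
client = EventHubConsumerClient.from_connection_string(
    connection_str,
    consumer_group,
    eventhub_name=eventhub_name,
    checkpoint_store=checkpoint_store,
)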
I'm using Node.js to subscribe to SNS on AWS, and SQS to handle queues. How do I know when a file has been uploaded to S3, so that a message is sent to my Node.js app automatically via SNS? Sorry, my English is not good.
You can do that by having S3 send a notification through SNS whenever a file is uploaded to the bucket:
https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
require 'aws-sdk-s3' # v2: require 'aws-sdk'
req = {}
req[:bucket] = bucket_name
events = ['s3:ObjectCreated:*']
notification_configuration = {}
# Add function
lc = {}
lc[:lambda_function_arn] = 'my-function-arn'
lc[:events] = events
lambda_configurations = []
lambda_configurations << lc
notification_configuration[:lambda_function_configurations] = lambda_configurations
# Add queue
qc = {}
qc[:queue_arn] = 'my-queue-arn'
qc[:events] = events
queue_configurations = []
queue_configurations << qc
notification_configuration[:queue_configurations] = queue_configurations
# Add topic
tc = {}
tc[:topic_arn] = 'my-topic-arn'
tc[:events] = events
topic_configurations = []
topic_configurations << tc
notification_configuration[:topic_configurations] = topic_configurations
req[:notification_configuration] = notification_configuration
req[:use_accelerate_endpoint] = false
s3 = Aws::S3::Client.new(region: 'us-west-2')
s3.put_bucket_notification_configuration(req)
You can also refer to this code, which uses the AWS SDK for Ruby.
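If you'd rather stay in Python (used elsewhere in this thread), the same configuration with boto3 looks like this (a sketch; the bucket name, region, and topic ARN are placeholders). In Node.js, the S3 putBucketNotificationConfiguration call takes parameters of the same shape, and the SNS topic's access policy must allow S3 to publish to it.

import boto3

# Hedged sketch: tell S3 to publish object-created events to an SNS topic;
# SNS can then fan out to SQS / your Node.js subscriber.
s3 = boto3.client('s3', region_name='us-west-2')
s3.put_bucket_notification_configuration(
    Bucket='my-bucket',
    NotificationConfiguration={
        'TopicConfigurations': [
            {
                'TopicArn': 'arn:aws:sns:us-west-2:123456789012:my-topic',
                'Events': ['s3:ObjectCreated:*'],
            }
        ]
    },
)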
I used this code to export data into a CSV file and it works:
project_id = 'project_id'
client = bigquery.Client()
dataset_id = 'dataset_id'
bucket_name = 'bucket_name'
table_id = 'table_id'
destination_uri = 'gs://{}/{}'.format(bucket_name, 'file.csv')
dataset_ref = client.dataset(dataset_id, project=project_id)
table_ref = dataset_ref.table(table_id)
extract_job = client.extract_table(
    table_ref,
    destination_uri,
)
extract_job.result()
But I would prefer a GZ file because my table is up to 700M. Could anyone help me export the data into a GZ file?
You need to add a job config, as in:
job_config = bigquery.job.ExtractJobConfig()
job_config.compression = 'GZIP'
Complete code:
from google.cloud import bigquery
client = bigquery.Client()
project_id = 'fh-bigquery'
dataset_id = 'public_dump'
table_id = 'afinn_en_165'
bucket_name = 'your_bucket'
destination_uri = 'gs://{}/{}'.format(bucket_name, 'file.csv.gz')
dataset_ref = client.dataset(dataset_id, project=project_id)
table_ref = dataset_ref.table(table_id)
job_config = bigquery.job.ExtractJobConfig()
job_config.compression = 'GZIP'
extract_job = client.extract_table(
    table_ref,
    destination_uri,
    job_config=job_config,
)
extract_job.result()
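One caveat: BigQuery cannot export more than 1 GB to a single file, so if the compressed output is still larger than that, put a wildcard in the destination URI and the extract job will shard the output into multiple files. A minimal sketch reusing the names above:

# Hedged sketch: shard the export across multiple compressed files.
destination_uri = 'gs://{}/{}'.format(bucket_name, 'file-*.csv.gz')
extract_job = client.extract_table(table_ref, destination_uri, job_config=job_config)
extract_job.result()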
I'm trying to learn how to use the Yahoo API, but when I request data from the site, it gives me an internal server error. I have tried every combination of league and leagues data, and even general game data, but everything returns an internal server error. I have attached my code below; any help would be much appreciated.
import json
import time
import webbrowser

import pandas as pd
from pandas.io.json import json_normalize
from rauth import OAuth1Service
from rauth.utils import parse_utf8_qsl

credentials_file = open('auth.json')
credentials = json.load(credentials_file)
credentials_file.close()

oauth = OAuth1Service(consumer_key='key',
                      consumer_secret='secret',
                      name="yahoo",
                      request_token_url="https://api.login.yahoo.com/oauth/v2/get_request_token",
                      access_token_url="https://api.login.yahoo.com/oauth/v2/get_token",
                      authorize_url="https://api.login.yahoo.com/oauth/v2/request_auth",
                      base_url="http://fantasysports.yahooapis.com/")

request_token, request_token_secret = oauth.get_request_token(params={"oauth_callback": "oob"})
authorize_url = oauth.get_authorize_url(request_token)
webbrowser.open(authorize_url)
verify = input('Enter code: ')

raw_access = oauth.get_raw_access_token(request_token,
                                        request_token_secret,
                                        params={"oauth_verifier": verify})
parsed_access_token = parse_utf8_qsl(raw_access.content)
access_token = (parsed_access_token['oauth_token'],
                parsed_access_token['oauth_token_secret'])

start_time = time.time()
end_time = start_time + 3600
credentials['access_token'] = parsed_access_token['oauth_token']
credentials['access_token_secret'] = parsed_access_token['oauth_token_secret']
tokens = (credentials['access_token'], credentials['access_token_secret'])

s = oauth.get_session(tokens)
r = s.get('https://fantasysports.yahooapis.com/fantasy/v2/leagues;league_keys=nba.l.60379', params={'format': 'json'})
print(r.status_code)
r.json()
And that prints {u'error': {u'description': u'Internal server error', u'lang': u'en-US'}}
It seems like this issue stems from Yahoo's side. One user reported that switching to OAuth2 authentication worked fine:
https://forums.yahoo.net/t5/Help-with-Fantasy-Baseball/Receiving-500-quot-Internal-Server-Error-quot-from-Yahoo-Fantasy/td-p/341427/page/4
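For reference, a minimal sketch of that OAuth2 flow, still using rauth (the endpoint URLs are Yahoo's documented OAuth2 endpoints; 'oob' makes Yahoo display the verifier code instead of redirecting; treat the parameter handling as an assumption based on rauth's OAuth2 examples):

import json
from rauth import OAuth2Service

oauth2 = OAuth2Service(client_id='key',
                       client_secret='secret',
                       name='yahoo',
                       authorize_url='https://api.login.yahoo.com/oauth2/request_auth',
                       access_token_url='https://api.login.yahoo.com/oauth2/get_token',
                       base_url='https://fantasysports.yahooapis.com/')

# Open this URL in a browser, approve access, and copy the code Yahoo shows.
print(oauth2.get_authorize_url(redirect_uri='oob', response_type='code'))
code = input('Enter code: ')

# Yahoo returns the token response as JSON, hence decoder=json.loads.
s = oauth2.get_auth_session(data={'code': code,
                                  'grant_type': 'authorization_code',
                                  'redirect_uri': 'oob'},
                            decoder=json.loads)

r = s.get('https://fantasysports.yahooapis.com/fantasy/v2/leagues;league_keys=nba.l.60379',
          params={'format': 'json'})
print(r.status_code)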