messages=%5B%7B%22values%22%3A+%7B%22momentum%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22momentum%22%2C+%22indicator_number%22%3A+0%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22momentum%22%5D%2C+%22hot%22%3A+0%2C+%22cold%22%3A+0%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+10%7D%2C+%22status%22%3A+%22hot%22%7D%2C+%22status%22%3A+%22hot%22%2C+%22last_status%22%3A+%22hot%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22%22%2C+%22indicator_label%22%3A+%22%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22leading_span_a%22%3A+%220.00%22%2C+%22leading_span_b%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22ichimoku%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22leading_span_a%22%2C+%22leading_span_b%22%5D%2C+%22hot%22%3A+true%2C+%22cold%22%3A+true%2C+%22candle_period%22%3A+%224h%22%2C+%22hot_label%22%3A+%22Bullish+Alert%22%2C+%22cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22bbp%22%3A+%220.96%22%2C+%22mfi%22%3A+%2298.05%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22bbp%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+20%2C+%22hot%22%3A+0.09%2C+%22cold%22%3A+0.8%2C+%22std_dev%22%3A+2%2C+%22signal%22%3A+%5B%22bbp%22%2C+%22mfi%22%5D%2C+%22hot_label%22%3A+%22Lower+Band%22%2C+%22cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%
3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%5D
I need to convert this data in Python 3 to standard JSON so I can POST it to a JSON API.
Any solution?
Thanks.
That looks like it's been URL form-encoded. Try:
import urllib.parse
import json
# note: WITHOUT the leading messages= part
stuff = "%5B%7B%22values%22%3A+%7B%22momentum%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22momentum%22%2C+%22indicator_number%22%3A+0%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22momentum%22%5D%2C+%22hot%22%3A+0%2C+%22cold%22%3A+0%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+10%7D%2C+%22status%22%3A+%22hot%22%7D%2C+%22status%22%3A+%22hot%22%2C+%22last_status%22%3A+%22hot%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22%22%2C+%22indicator_label%22%3A+%22%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22leading_span_a%22%3A+%220.00%22%2C+%22leading_span_b%22%3A+%220.00%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22ichimoku%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22signal%22%3A+%5B%22leading_span_a%22%2C+%22leading_span_b%22%5D%2C+%22hot%22%3A+true%2C+%22cold%22%3A+true%2C+%22candle_period%22%3A+%224h%22%2C+%22hot_label%22%3A+%22Bullish+Alert%22%2C+%22cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Bearish+Alert%22%2C+%22indicator_label%22%3A+%22ICHIMOKU+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%2C+%7B%22values%22%3A+%7B%22bbp%22%3A+%220.96%22%2C+%22mfi%22%3A+%2298.05%22%7D%2C+%22exchange%22%3A+%22binance%22%2C+%22market%22%3A+%22BNT%2FETH%22%2C+%22base_currency%22%3A+%22BNT%22%2C+%22quote_currency%22%3A+%22ETH%22%2C+%22indicator%22%3A+%22bbp%22%2C+%22indicator_number%22%3A+1%2C+%22analysis%22%3A+%7B%22config%22%3A+%7B%22enabled%22%3A+true%2C+%22alert_enabled%22%3A+true%2C+%22alert_frequency%22%3A+%22once%22%2C+%22candle_period%22%3A+%224h%22%2C+%22period_count%22%3A+20%2C+%22hot%22%3A+0.09%2C+%22cold%22%3A+0.8%2C+%22std_dev%22%3A+2%2C+%22signal%22%3A+%5B%22bbp%22%2C+%22mfi%22%5D%2C+%22hot_label%22%3A+%22Lower+Band%22%2C+%22cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22mute_cold%22%3A+false%7D%2C+%22status%22%3A+%22cold%22%7D%2C+%22status%22%3A+%22cold%22%2C+%22last_status%22%3A+%22cold%22%2C+%22prices%22%3A+%22+Open%3A+0.000989+High%3A+0.000998+Low%3A+0.000980+Close%3A+0.000998%22%2C+%22lrsi%22%3A+%22%22%2C+%22creation_date%22%3A+%222020-05-10+16%3A16%3A23%22%2C+%22hot_cold_label%22%3A+%22Upper+Band+BB%22%2C+%22indicator_label%22%3A+%22Bollinger+4+hr%22%2C+%22price_value%22%3A+%7B%22open%22%3A+0.000989%2C+%22high%22%
3A+0.000998%2C+%22low%22%3A+0.00098%2C+%22close%22%3A+0.000998%7D%2C+%22decimal_format%22%3A+%22%25.6f%22%7D%5D"
parsed = urllib.parse.unquote_plus(stuff)  # decode %XX escapes and turn '+' back into spaces
as_json = json.loads(parsed)
print(as_json)
which gives me:
[{'values': {'momentum': '0.00'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'momentum', 'indicator_number': 0, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'signal': ['momentum'], 'hot': 0, 'cold': 0, 'candle_period': '4h', 'period_count': 10}, 'status': 'hot'}, 'status': 'hot', 'last_status': 'hot', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': '', 'indicator_label': '', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}, {'values': {'leading_span_a': '0.00', 'leading_span_b': '0.00'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'ichimoku', 'indicator_number': 1, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'signal': ['leading_span_a', 'leading_span_b'], 'hot': True, 'cold': True, 'candle_period': '4h', 'hot_label': 'Bullish Alert', 'cold_label': 'Bearish Alert', 'indicator_label': 'ICHIMOKU 4 hr', 'mute_cold': False}, 'status': 'cold'}, 'status': 'cold', 'last_status': 'cold', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': 'Bearish Alert', 'indicator_label': 'ICHIMOKU 4 hr', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}, {'values': {'bbp': '0.96', 'mfi': '98.05'}, 'exchange': 'binance', 'market': 'BNT/ETH', 'base_currency': 'BNT', 'quote_currency': 'ETH', 'indicator': 'bbp', 'indicator_number': 1, 'analysis': {'config': {'enabled': True, 'alert_enabled': True, 'alert_frequency': 'once', 'candle_period': '4h', 'period_count': 20, 'hot': 0.09, 'cold': 0.8, 'std_dev': 2, 'signal': ['bbp', 'mfi'], 'hot_label': 'Lower Band', 'cold_label': 'Upper Band BB', 'indicator_label': 'Bollinger 4 hr', 'mute_cold': False}, 'status': 'cold'}, 'status': 'cold', 'last_status': 'cold', 'prices': ' Open: 0.000989 High: 0.000998 Low: 0.000980 Close: 0.000998', 'lrsi': '', 'creation_date': '2020-05-10 16:16:23', 'hot_cold_label': 'Upper Band BB', 'indicator_label': 'Bollinger 4 hr', 'price_value': {'open': 0.000989, 'high': 0.000998, 'low': 0.00098, 'close': 0.000998}, 'decimal_format': '%.6f'}]
Whereas if you want a JSON string to POST somewhere, call as_string = json.dumps(as_json) (dump the parsed object, not the decoded string, or you'll end up with a doubly encoded string).
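If the goal is to POST that JSON to an API, a minimal sketch with requests could look like this (the URL is a placeholder, not from the original question):
import requests

# requests serializes the parsed object and sets the Content-Type header for you
resp = requests.post("https://example.com/api", json=as_json)
# or send the pre-serialized string yourself:
# resp = requests.post("https://example.com/api", data=json.dumps(as_json),
#                      headers={"Content-Type": "application/json"})
print(resp.status_code)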
HPS data:
/home/dotty/anaconda3/envs/JukVersionRoy/jukebox/train.py --hps=small_vqvae --name=small_vqvae --sample_length=9999999999999 --bs=2 --audio_files_dir=/home/dotty/anaconda3/envs/JukVersionRoy/jukebox/my-training-data --labels=False --train --aug_shift --aug_blend
Here is the output:
HPS ***************
{'rcall_command': '<unknown_rcall_command>', 'git_commit': '<unknown_git_commit>', 'name': '', 'debug_mem': False, 'debug_eval_files': False, 'debug_speed': False, 'debug_iters': 100, 'debug_batch': False, 'debug_grad_accum': False, 'debug_inputs': False, 'local_path': '', 'local_logdir': 'logs', 'max_len': 24, 'max_log': 32, 'save': True, 'save_iters': 20000, 'seed': 0, 'prior': False, 'log_steps': 100, 'func': '', 'audio_files_dir': '', 'finetune': '', 'english_only': False, 'bs': 1, 'bs_sample': 1, 'nworkers': 1, 'aug_shift': False, 'aug_blend': False, 'train_test_split': 0.9, 'train_shrink_factor': 1.0, 'test_shrink_factor': 1.0, 'p_unk': 0.1, 'min_duration': None, 'max_duration': None, 'n_tokens': 0, 'n_vocab': 0, 'use_tokens': False, 'curr_epoch': -1, 'restore_vqvae': '', 'levels': 2, 'downs_t': (1, 1), 'strides_t': (2, 2), 'hvqvae_multipliers': None, 'revival_threshold': 1.0, 'emb_width': 64, 'l_bins': 512, 'l_mu': 0.99, 'commit': 1.0, 'spectral': 0.0, 'multispectral': 1.0, 'loss_fn': 'l2', 'linf_k': 2048, 'lmix_l1': 0.0, 'lmix_l2': 0.0, 'lmix_linf': 0.0, 'use_bottleneck': True, 'depth': 3, 'width': 128, 'm_conv': 1.0, 'dilation_growth_rate': 1, 'dilation_cycle': None, 'vqvae_reverse_decoder_dilation': True, 'restore_prior': '', 'restore_prior_ddp': False, 'max_bow_genre_size': None, 'y_bins': 0, 'level': 0, 'cond_levels': None, 't_bins': 64, 'y_cond_as_bias': False, 'copy_input': False, 'merged_decoder': False, 'single_enc_dec': False, 'alignment_layer': None, 'alignment_head': None, 'n_ctx': 1024, 'prior_depth': 3, 'prior_width': 128, 'heads': 1, 'attn_order': 0, 'blocks': None, 'spread': None, 'attn_dropout': 0.0, 'resid_dropout': 0.0, 'emb_dropout': 0.0, 'zero_out': False, 'res_scale': False, 'pos_init': False, 'init_scale': 1.0, 'm_attn': 0.25, 'm_mlp': 1.0, 'c_res': 0, 'c_attn': 0, 'c_mlp': 0, 'cond_depth': 3, 'cond_width': 128, 'cond_m_conv': 1.0, 'cond_zero_out': False, 'cond_res_scale': False, 'cond_dilation_growth_rate': 1, 'cond_dilation_cycle': None, 'cond_c_res': 0, 'primed_chunk_size': None, 'selected_artists': '', 'temp_top': 1.0, 'temp_rest': 0.99, 'sample_length_in_seconds': 24, 'total_sample_length_in_seconds': 240, 'prime_loss_fraction': 0.1, 'restore_decoder': '', 'prime_depth': 3, 'prime_width': 128, 'prime_heads': 1, 'prime_attn_order': 0, 'prime_blocks': None, 'prime_spread': None, 'prime_attn_dropout': 0.0, 'prime_resid_dropout': 0.0, 'prime_emb_dropout': 0.0, 'prime_zero_out': False, 'prime_res_scale': False, 'prime_pos_init': False, 'prime_init_scale': 1.0, 'prime_m_attn': 0.25, 'prime_m_mlp': 1.0, 'prime_c_res': 0, 'prime_c_attn': 0, 'prime_c_mlp': 0, 'prime_rel_attn': False, 'prime_posemb_timescale': 10000, 'epochs': 10000, 'lr': 0.0003, 'clip': 1.0, 'beta1': 0.9, 'beta2': 0.999, 'ignore_grad_norm': 0, 'weight_decay': 0.0, 'eps': 1e-08, 'lr_warmup': 100.0, 'lr_decay': 10000000000.0, 'lr_gamma': 1.0, 'lr_scale': 1.0, 'lr_use_linear_decay': False, 'lr_start_linear_decay': 0, 'lr_use_cosine_decay': False, 'fp16': False, 'fp16_params': False, 'fp16_loss_scale': None, 'fp16_scale_window': 1000.0, 'fp16_opt': False, 'labels': True, 'labels_v3': False, 'dump': False, 'ema': True, 'ema_fused': True, 'cpu_ema': False, 'cpu_ema_freq': 100, 'reset_best_loss': False, 'reset_step': False, 'reset_opt': False, 'reset_shd': False, 'train': False, 'test': False, 'sample': False, 'sampler': 'ancestral', 'codes_logdir': '', 'date': None, 'labeller': 'top_genres', 'label_line': 0, 'iters_before_update': 1, 'grad_accum_iters': 0, 'mu': None, 'piped': False, 'pipe_depth': 8, 
'break_train': 10000000000.0, 'break_test': 10000000000.0, 'exit_train': 10000000000.0, 'n_fft': 1024, 'hop_length': 256, 'window_size': 1024, 'sr': 44100, 'channels': 2, 'wav': '', 'n_inps': 1, 'n_hops': 2, 'n_segment': 1, 'n_total_segment': 1, 'n_segment_each': 1, 'prime_chunks': 4, 'sample_length': 0, 'sample_hop_length': 30000, 'max_silence_pad_length': 0, 'ignore_boundaries': False, 'use_nonrelative_specloss': True, 'multispec_loss_n_fft': (2048, 1024, 512), 'multispec_loss_hop_length': (240, 120, 50), 'multispec_loss_window_size': (1200, 600, 240), 'bucket': 128, 'ngpus': 16, 'argv': '/home/dotty/anaconda3/envs/JukVersionRoy/jukebox/train.py --hps=small_vqvae --name=small_vqvae --sample_length=9999999999999 --bs=2 --audio_files_dir=/home/dotty/anaconda3/envs/JukVersionRoy/jukebox/my-training-data --labels=False --train --aug_shift --aug_blend'}
SAMPLE LENGTH:
0
SAMPLE RATE:
44100
SAMPLE LENGTH / SAMPLE RATE:
0.0
MIN DURATION:
0
AssertionError Traceback (most recent call last)
/tmp/ipykernel_14312/1841762803.py in <module>
325
326
--> 327 run()
/tmp/ipykernel_14312/1841762803.py in run(hps, port, **kwargs)
283 print("HPS ***************")
284 print(hps)
--> 285 data_processor = DataProcessor(hps)
286
287 # Setup models
~/anaconda3/envs/JukVersionRoy/jukebox/data/data_processor.py in __init__(self, hps)
24 class DataProcessor():
25 def __init__(self, hps):
---> 26 self.dataset = FilesAudioDataset(hps)
27 duration = 1 if hps.prior else 600
28 hps.bandwidth = calculate_bandwidth(self.dataset, hps, duration=duration)
~/anaconda3/envs/JukVersionRoy/jukebox/data/files_dataset.py in __init__(self, hps)
24 print("MIN DURATION:")
25 print(self.min_duration)
---> 26 assert hps.sample_length / hps.sr < self.min_duration, f'Sample length {hps.sample_length} per sr {hps.sr} ({hps.sample_length / hps.sr:.2f}) should be shorter than min duration {self.min_duration}'
27 self.aug_shift = hps.aug_shift
28 self.labels = hps.labels
AssertionError: Sample length 0 per sr 44100 (0.00) should be shorter than min duration 0
Our files are not being read in: the sample rate stayed at 44100 when we changed the input files. We went through files_dataset.py, data_processor.py, and audio_utils.py and could not find where exactly the input files are registered. Our path is correct. We are trying to get the sample length and min duration to be read in correctly.
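For reference, the failing check in files_dataset.py reduces to the comparison below; this is just a restatement of the values printed above, not a fix:
# values taken from the printed output: sample_length ended up 0 and min_duration is 0,
# so sample_length / sr < min_duration can never hold
sample_length = 0
sr = 44100
min_duration = 0
print(sample_length / sr)                 # 0.0
print(sample_length / sr < min_duration)  # False -> AssertionError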
I know how to set axis limits and whatnot, but how do I query for currently used axis limits?
I don't think this is possible unless you specifically set an axis limit first:
import altair as alt
from vega_datasets import data
source = data.cars.url
chart = alt.Chart(source).mark_circle().encode(
    x='Horsepower:Q',
    y='Miles_per_Gallon:Q',
)
chart.to_dict()
{'config': {'view': {'continuousWidth': 400, 'continuousHeight': 300}},
'data': {'url': 'https://cdn.jsdelivr.net/npm/vega-datasets#v1.29.0/data/cars.json'},
'mark': 'circle',
'encoding': {'x': {'field': 'Horsepower', 'type': 'quantitative'},
'y': {'field': 'Miles_per_Gallon', 'type': 'quantitative'}},
'$schema': 'https://vega.github.io/schema/vega-lite/v5.2.0.json'}
If you set the domain, you can see it in the spec:
chart = alt.Chart(source).mark_circle().encode(
    x=alt.X('Horsepower:Q', scale=alt.Scale(domain=[0, 250])),
    y='Miles_per_Gallon:Q',
)
chart.to_dict()
{'config': {'view': {'continuousWidth': 400, 'continuousHeight': 300}},
'data': {'url': 'https://cdn.jsdelivr.net/npm/vega-datasets#v1.29.0/data/cars.json'},
'mark': 'circle',
'encoding': {'x': {'field': 'Horsepower',
'scale': {'domain': [0, 250]},
'type': 'quantitative'},
'y': {'field': 'Miles_per_Gallon', 'type': 'quantitative'}},
'$schema': 'https://vega.github.io/schema/vega-lite/v5.2.0.json'}
and get it via chart.to_dict()['encoding']['x']['scale']['domain'].
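For example, pulling the domain back out of the spec of the second chart above:
# only works when a domain was set explicitly; otherwise the 'scale' key
# is absent and this raises a KeyError
x_domain = chart.to_dict()['encoding']['x']['scale']['domain']
print(x_domain)  # [0, 250]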
I want to get the price precision of any futures asset.
What I tried:
client.get_symbol_info(symbol='My Symbol')
But this returns the precision for Spot, and I need the precision for Futures.
So there's this command:
client.futures_exchange_info()
Which returns this:
{'timezone': 'UTC', 'serverTime': 1630437033153, 'futuresType': 'U_MARGINED', 'rateLimits': [{'rateLimitType': 'REQUEST_WEIGHT', 'interval': 'MINUTE', 'intervalNum': 1, 'limit': 2400},
{'rateLimitType': 'ORDERS', 'interval': 'MINUTE', 'intervalNum': 1, 'limit': 1200},
{'rateLimitType': 'ORDERS', 'interval': 'SECOND', 'intervalNum': 10, 'limit': 300}],
'exchangeFilters': [],
'assets': [{'asset': 'USDT', 'marginAvailable': True, 'autoAssetExchange': '-10000'},
{'asset': 'BTC', 'marginAvailable': True, 'autoAssetExchange': '-0.00100000'},
{'asset': 'BNB', 'marginAvailable': True, 'autoAssetExchange': '-10'},
{'asset': 'ETH', 'marginAvailable': True, 'autoAssetExchange': '-5'},
{'asset': 'BUSD', 'marginAvailable': True, 'autoAssetExchange': '-10000'}],
'symbols': [{'symbol': 'BTCUSDT', 'pair': 'BTCUSDT', 'contractType': 'PERPETUAL', 'deliveryDate': 4133404800000, 'onboardDate': 1569398400000, 'status': 'TRADING', 'maintMarginPercent': '2.5000', 'requiredMarginPercent': '5.0000', 'baseAsset': 'BTC', 'quoteAsset': 'USDT', 'marginAsset': 'USDT', 'pricePrecision': 2, 'quantityPrecision': 3, 'baseAssetPrecision': 8, 'quotePrecision': 8, 'underlyingType': 'COIN', 'underlyingSubType': [], 'settlePlan': 0, 'triggerProtect': '0.0500', 'liquidationFee': '0.012000', 'marketTakeBound': '0.05', 'filters': [{'minPrice': '556.72', 'maxPrice': '4529764', 'filterType': 'PRICE_FILTER', 'tickSize': '0.01'}, {'stepSize': '0.001', 'filterType': 'LOT_SIZE', 'maxQty': '1000', 'minQty': '0.001'}, {'stepSize': '0.001', 'filterType': 'MARKET_LOT_SIZE', 'maxQty': '200', 'minQty': '0.001'}, {'limit': 200, 'filterType': 'MAX_NUM_ORDERS'}, {'limit': 10, 'filterType': 'MAX_NUM_ALGO_ORDERS'}, {'notional': '5', 'filterType': 'MIN_NOTIONAL'}, {'multiplierDown': '0.9500', 'multiplierUp': '1.0500', 'multiplierDecimal': '4', 'filterType': 'PERCENT_PRICE'}], 'orderTypes': ['LIMIT', 'MARKET', 'STOP', 'STOP_MARKET', 'TAKE_PROFIT', 'TAKE_PROFIT_MARKET', 'TRAILING_STOP_MARKET'], 'timeInForce': ['GTC', 'IOC', 'FOK', 'GTX']},
{'symbol': 'ETHUSDT', 'pair': 'ETHUSDT', 'contractType': 'PERPETUAL', 'deliveryDate': 4133404800000, 'onboardDate': 1569398400000, 'status': 'TRADING', 'maintMarginPercent': '2.5000', 'requiredMarginPercent': '5.0000', 'baseAsset': 'ETH', 'quoteAsset': 'USDT', 'marginAsset': 'USDT', 'pricePrecision': 2, 'quantityPrecision': 3, 'baseAssetPrecision': 8, 'quotePrecision': 8, 'underlyingType': 'COIN', 'underlyingSubType': [], 'settlePlan': 0, 'triggerProtect': '0.0500', 'liquidationFee': '0.005000', 'marketTakeBound': '0.05', 'filters': [{'minPrice': '39.86', 'maxPrice': '306177', 'filterType': 'PRICE_FILTER', 'tickSize': '0.01'}, {'stepSize': '0.001', 'filterType': 'LOT_SIZE', 'maxQty': '10000', 'minQty': '0.001'}, {'stepSize': '0.001', 'filterType': 'MARKET_LOT_SIZE', 'maxQty': '2000', 'minQty': '0.001'}, {'limit': 200, 'filterType': 'MAX_NUM_ORDERS'}, {'limit': 10, 'filterType': 'MAX_NUM_ALGO_ORDERS'}, {'notional': '5', 'filterType': 'MIN_NOTIONAL'}, {'multiplierDown': '0.9500', 'multiplierUp': '1.0500', 'multiplierDecimal': '4', 'filterType': 'PERCENT_PRICE'}], 'orderTypes': ['LIMIT', 'MARKET', 'STOP', 'STOP_MARKET', 'TAKE_PROFIT', 'TAKE_PROFIT_MARKET', 'TRAILING_STOP_MARKET'], 'timeInForce': ['GTC', 'IOC', 'FOK', 'GTX']}...
and so on.
I need to access the value of 'quantityPrecision'.
Is there a way to filter this list by the symbol value, e.g. 'BTCUSDT', and then return the value of 'quantityPrecision'?
Thanks in advance for any help.
info = client.futures_exchange_info()

def get_precision(symbol):
    # walk the futures symbol list and return quantityPrecision for the matching symbol
    for x in info['symbols']:
        if x['symbol'] == symbol:
            return x['quantityPrecision']

print(get_precision('LTCUSDT'))
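If you also need the price precision mentioned at the start of the question, it lives in the same symbol entry, so a small variant of the function above works (a sketch; returns None for unknown symbols):
def get_precisions(symbol):
    # return (pricePrecision, quantityPrecision) for the given futures symbol
    for x in info['symbols']:
        if x['symbol'] == symbol:
            return x['pricePrecision'], x['quantityPrecision']
    return None

print(get_precisions('BTCUSDT'))  # (2, 3) according to the exchange info shown above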
async function getLimits(asset) {
    var filtered = await client.futures_exchange_info()
    return Object.values(filtered.symbols).filter(O => O.symbol === asset)[0]
}

getLimits("BTCUSDT")
I am trying to remove an item ("logs") from my dictionary using the del statement.
This is my code:
del response.json() ["logs"]
print(response.json())
This is my JSON dictionary:
{'count': 19804,
'next': {'limit': 1, 'offset': 1},
'previous': None,
'results':
[{'id': '334455',
'custom_id': '112',
'company': 28,
'company_name': 'Sunshine and Flowers',
'delivery_address': '34 olive beach house, #01-22, 612345',
'delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'},
'sender_name': 'Edward Shine',
'sender_email': '',
'sender_contact': '91234567',
'removed': None,
'recipient_name': 'Mint Shine',
'recipient_contact': '91234567',
'notes': '',
'items': [{'id': 21668, 'name': 'Loose hair flowers', 'quantity': 1, 'metadata': {}, 'removed': None}, {'id': 21667, 'name': "Groom's Boutonniere", 'quantity': 1, 'metadata': {}, 'removed': None}, {'id': 21666, 'name': 'Bridal Bouquet', 'quantity': 1, 'metadata': {}, 'removed': None}],
'latitude': '1.28283838383642000000',
'longitude': '103.2828037266201000000',
'created': '2019-08-15T05:40:30.385467Z',
'updated': '2019-08-15T05:41:27.930110Z',
'status': 'pending',
'verbose_status': 'Pending',
'**logs**': [{'id': 334455, 'order': '50c402d8-7c76-45b5-b883-e2fb887a507e', 'order_custom_id': '112', 'order_delivery_address': '34 olive beach house, #01-22, 6123458', 'order_delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'}, 'message': 'Order was created.', 'failure_reason': None, 'success_code': None, 'success_description': None, 'created': '2019-08-15T05:40:30.431790Z', 'removed': None}, {'id': 334455, 'order': '50c402d8-7c76-45b5-b883-e2fb887a507e', 'order_custom_id': '112', 'order_delivery_address': '34 olive beach house, #01-22, 612345', 'order_delivery_timeslot': {'lower': '2019-12-06T10:00:00Z', 'upper': '2019-12-06T13:00:00Z', 'bounds': '[)'}, 'message': 'Order is pending.', 'failure_reason': None, 'success_code': None, 'success_description': None, 'created': '2019-08-15T05:40:30.433139Z', 'removed': None}],
'reschedule_requests': [],
'signature': None}]
But it raises this error:
KeyError: 'logs'
What am I doing wrong? Please assist.
Every time you call response.json(), it returns a new dict, so the key you delete from response.json() won't be reflected in the next call to response.json().
You should instead save the return value of response.json() to a variable before deleting the desired key. Also note that 'logs' is not a top-level key, it sits inside each entry of 'results', which is why the KeyError is raised:
data = response.json()
del data['results'][0]['logs']
print(data)
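If the response can contain more than one entry in 'results', you may want to strip 'logs' from every entry (a small sketch; dict.pop avoids a KeyError when the key is missing):
data = response.json()
for result in data['results']:
    result.pop('logs', None)  # remove 'logs' if present, ignore otherwise
print(data)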
I have a dictionary object that is being returned to me from AWS. I need to pull the tag "based_on_ami" out of this dictionary. I have tried converting it to a list, but I am new to programming and have not been able to figure out how to access Tags, since they are a few levels down in the dictionary.
What is the best way for me to pull that tag out of the dictionary and put it into a variable I can use?
{
'Images':[
{
'Architecture':'x86_64',
'CreationDate':'2017-11-27T14:41:30.000Z',
'ImageId':'ami-8e73e0f4',
'ImageLocation':'23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5',
'ImageType':'machine',
'Public':False,
'OwnerId':'23452345234545',
'State':'available',
'BlockDeviceMappings':[
{
'DeviceName':'/dev/sda1',
'Ebs':{
'Encrypted':False,
'DeleteOnTermination':True,
'SnapshotId':'snap-0c10e8f5ced5b5240',
'VolumeSize':8,
'VolumeType':'gp2'
}
},
{
'DeviceName':'/dev/sdb',
'VirtualName':'ephemeral0'
},
{
'DeviceName':'/dev/sdc',
'VirtualName':'ephemeral1'
}
],
'EnaSupport':True,
'Hypervisor':'xen',
'Name':'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5',
'RootDeviceName':'/dev/sda1',
'RootDeviceType':'ebs',
'SriovNetSupport':'simple',
'Tags':[
{
'Key':'service',
'Value':'baseami'
},
{
'Key':'cloudservice',
'Value':'ami'
},
{
'Key':'Name',
'Value':'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5'
},
{
'Key':'os',
'Value':'ubuntu 16.04 lts'
},
{
'Key':'based_on_ami',
'Value':'ami-aa2ea8d0'
}
],
'VirtualizationType':'hvm'
}
],
'ResponseMetadata':{
'RequestId':'2c376c75-c31f-4aba-a058-173f3b125a00',
'HTTPStatusCode':200,
'HTTPHeaders':{
'content-type':'text/xml;charset=UTF-8',
'transfer-encoding':'chunked',
'vary':'Accept-Encoding',
'date':'Fri, 01 Dec 2017 18:17:53 GMT',
'server':'AmazonEC2'
},
'RetryAttempts':0
}
}
The best way to approach this type of problem is to find the value you're looking for, and then work outwards until you find a solution. You need to look at what is at each of those levels.
So, what are you looking for? You're looking for the Value for based_on_ami's Key. So your final step is going to be:
if obj['Key'] == 'based_on_ami':
    # do something with obj['Value'].
But how do you get there? Well, the object is inside of a list, so you'll need to iterate the list:
for tag in <some list>:
    if tag['Key'] == 'based_on_ami':
        # do something with tag['Value'].
What is that list? It's the list of tags:
for tag in image['Tags']:
    if tag['Key'] == 'based_on_ami':
        # do something with tag['Value'].
And where are those tags? In an image object that you find in a list:
for image in image_list:
    for tag in image['Tags']:
        if tag['Key'] == 'based_on_ami':
            # do something with tag['Value'].
The image list is the value found at the Images key in your initial dict.
image_list = my_data['Images']
for image in image_list:
    for tag in image['Tags']:
        if tag['Key'] == 'based_on_ami':
            # do something with tag['Value'].
And now you're collecting all of those values, so you'll need a list and you'll need to append to it:
result = []
image_list = my_data['Images']
for image in image_list:
    for tag in image['Tags']:
        if tag['Key'] == 'based_on_ami':
            result.append(tag['Value'])
So, I took your example above, and added another based_on_ami node with the value quack:
{'ResponseMetadata': {'RequestId': '2c376c75-c31f-4aba-a058-173f3b125a00', 'RetryAttempts': 0, 'HTTPHeaders': {'vary': 'Accept-Encoding', 'transfer-encoding': 'chunked', 'server': 'AmazonEC2', 'content-type': 'text/xml;charset=UTF-8', 'date': 'Fri, 01 Dec 2017 18:17:53 GMT'}, 'HTTPStatusCode': 200}, 'Images': [{'Public': False, 'CreationDate': '2017-11-27T14:41:30.000Z', 'BlockDeviceMappings': [{'Ebs': {'SnapshotId': 'snap-0c10e8f5ced5b5240', 'VolumeSize': 8, 'Encrypted': False, 'VolumeType': 'gp2', 'DeleteOnTermination': True}, 'DeviceName': '/dev/sda1'}, {'VirtualName': 'ephemeral0', 'DeviceName': '/dev/sdb'}, {'VirtualName': 'ephemeral1', 'DeviceName': '/dev/sdc'}], 'OwnerId': '23452345234545', 'ImageLocation': '23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'RootDeviceName': '/dev/sda1', 'ImageType': 'machine', 'Hypervisor': 'xen', 'RootDeviceType': 'ebs', 'State': 'available', 'Architecture': 'x86_64', 'Name': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Tags': [{'Value': 'baseami', 'Key': 'service'}, {'Value': 'ami', 'Key': 'cloudservice'}, {'Value': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Key': 'Name'}, {'Value': 'ubuntu 16.04 lts', 'Key': 'os'}, {'Value': 'ami-aa2ea8d0', 'Key': 'based_on_ami'}], 'EnaSupport': True, 'SriovNetSupport': 'simple', 'ImageId': 'ami-8e73e0f4'}, {'Public': False, 'CreationDate': '2017-11-27T14:41:30.000Z', 'BlockDeviceMappings': [{'Ebs': {'SnapshotId': 'snap-0c10e8f5ced5b5240', 'VolumeSize': 8, 'Encrypted': False, 'VolumeType': 'gp2', 'DeleteOnTermination': True}, 'DeviceName': '/dev/sda1'}, {'VirtualName': 'ephemeral0', 'DeviceName': '/dev/sdb'}, {'VirtualName': 'ephemeral1', 'DeviceName': '/dev/sdc'}], 'VirtualizationType': 'hvm', 'OwnerId': '23452345234545', 'ImageLocation': '23452345234545/java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'RootDeviceName': '/dev/sda1', 'ImageType': 'machine', 'Hypervisor': 'xen', 'RootDeviceType': 'ebs', 'State': 'available', 'Architecture': 'x86_64', 'Name': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Tags': [{'Value': 'baseami', 'Key': 'service'}, {'Value': 'ami', 'Key': 'cloudservice'}, {'Value': 'java8server_ubuntu16-2b71edd1-f95e-4ee5-8fd6-d8a46975fdb5', 'Key': 'Name'}, {'Value': 'ubuntu 16.04 lts', 'Key': 'os'}, {'Value': 'quack', 'Key': 'based_on_ami'}], 'EnaSupport': True, 'SriovNetSupport': 'simple', 'ImageId': 'ami-8e73e0f4'}]}
My result:
['ami-aa2ea8d0', 'quack']
info = {...}  # the AWS response dict shown above
tags = []
for image in info['Images']:
    for tag in image['Tags']:
        if tag['Key'] == 'based_on_ami':
            tags.append(tag['Value'])
print(tags)
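The same logic also fits in a single comprehension, if you prefer it compact:
tags = [tag['Value']
        for image in info['Images']
        for tag in image['Tags']
        if tag['Key'] == 'based_on_ami']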