@@ -19,13 +19,13 @@ mmgen_node_tools.Ticker: Display price information for cryptocurrency and other
 # Possible alternatives:
 # - https://min-api.cryptocompare.com/data/pricemultifull?fsyms=BTC,LTC&tsyms=USD,EUR

-import sys, os, re, time, datetime, json, yaml, random
+import os, re, time, datetime, json, yaml, random
 from subprocess import run, PIPE, CalledProcessError
 from decimal import Decimal
 from collections import namedtuple

 from mmgen.color import red, yellow, green, blue, orange, gray
-from mmgen.util import msg, msg_r, Msg, Msg_r, die, fmt, fmt_list, fmt_dict, list_gen
+from mmgen.util import msg, msg_r, rmsg, Msg, Msg_r, die, fmt, fmt_list, fmt_dict, list_gen, suf
 from mmgen.ui import do_pager

 homedir = os.getenv('HOME')
@@ -99,18 +99,21 @@ class DataSource:
 			if not os.path.exists(cfg.cachedir):
 				os.makedirs(cfg.cachedir)

-			if not os.path.exists(self.json_fn):
-				open(self.json_fn, 'w').write('{}')
-
 			use_cached_data = cfg.cached_data and not gcfg.download

 			if use_cached_data:
 				data_type = 'json'
-				data_in = open(self.json_fn).read()
+				try:
+					data_in = open(self.json_fn).read()
+				except FileNotFoundError:
+					die(1, f'Cannot use cached data, because {self.json_fn_disp} does not exist')
 			else:
 				data_type = self.net_data_type
-				elapsed = int(time.time() - os.stat(self.json_fn).st_mtime)
-				if elapsed >= self.timeout or gcfg.testing:
+				try:
+					mtime = os.stat(self.json_fn).st_mtime
+				except FileNotFoundError:
+					mtime = 0
+				if (elapsed := int(time.time() - mtime)) >= self.timeout or gcfg.testing:
 					if gcfg.testing:
 						msg('')
 					self.fetch_delay()
@@ -146,20 +149,18 @@ class DataSource:
 			elif 'error' in data:
 				die(1, data['error'])

+			self.data = self.postprocess_data(data)
+
 			if use_cached_data:
+				self.json_text = None
 				if not cfg.quiet:
-					msg(f'Using cached data from ~/{self.json_fn_rel}')
+					msg(f'Using cached data from {self.json_fn_disp}')
 			else:
-				if os.path.exists(self.json_fn):
-					os.rename(self.json_fn, self.json_fn + '.bak')
-				with open(self.json_fn, 'w') as fh:
-					fh.write(json_text)
-				if not cfg.quiet:
-					msg(f'JSON data cached to ~/{self.json_fn_rel}')
-				if gcfg.download:
-					sys.exit(0)
+				self.json_text = json_text
+				if cache_data(self, no_overwrite=True):
+					self.json_text = None

-			return self.postprocess_data(data)
+			return self

 		def json_data_error_msg(self, json_text):
 			pass
@@ -168,13 +169,14 @@ class DataSource:
 			return data

 		@property
-		def json_fn_rel(self):
-			return os.path.relpath(self.json_fn, start=homedir)
+		def json_fn_disp(self):
+			return '~/' + os.path.relpath(self.json_fn, start=homedir)

 	class coinpaprika(base):
 		desc = 'CoinPaprika'
 		data_desc = 'cryptocurrency data'
 		api_host = 'api.coinpaprika.com'
+		api_proto = 'https'
 		ratelimit = 240
 		btc_ratelimit = 10
 		net_data_type = 'json'
@@ -185,16 +187,19 @@ class DataSource:
 			self.asset_limit = int(cfg.asset_limit or self.dfl_asset_limit)

 		def rate_limit_errmsg(self, elapsed):
+			rem = self.timeout - elapsed
 			return (
-				f'Rate limit exceeded! Retry in {self.timeout-elapsed} seconds' +
+				f'Rate limit exceeded! Retry in {rem} second{suf(rem)}' +
 				('' if cfg.btc_only else ', or use --cached-data or --btc'))

 		@property
 		def api_url(self):
 			return (
-				f'https://{self.api_host}/v1/tickers/btc-bitcoin' if cfg.btc_only else
-				f'https://{self.api_host}/v1/tickers?limit={self.asset_limit}' if self.asset_limit else
-				f'https://{self.api_host}/v1/tickers')
+				f'{self.api_proto}://{self.api_host}/v1/tickers/btc-bitcoin'
+					if cfg.btc_only else
+				f'{self.api_proto}://{self.api_host}/v1/tickers?limit={self.asset_limit}'
+					if self.asset_limit else
+				f'{self.api_proto}://{self.api_host}/v1/tickers')

 		@property
 		def json_fn(self):
@@ -267,7 +272,8 @@ class DataSource:
 				'last_updated': data['regularMarketTime']}

 		def rate_limit_errmsg(self, elapsed):
-			return f'Rate limit exceeded! Retry in {self.timeout-elapsed} seconds, or use --cached-data'
+			rem = self.timeout - elapsed
+			return f'Rate limit exceeded! Retry in {rem} second{suf(rem)}, or use --cached-data'

 		@property
 		def json_fn(self):
@@ -361,10 +367,10 @@ def gen_data(data):
 	checking for duplicates.
 	"""

-	def dup_sym_errmsg(dup_sym):
+	def dup_sym_errmsg(data_type, dup_sym):
 		return (
 			f'The symbol {dup_sym!r} is shared by the following assets:\n' +
-			'\n  ' + '\n  '.join(d['id'] for d in data['cc'] if d['symbol'] == dup_sym) +
+			'\n  ' + '\n  '.join(d['id'] for d in data[data_type].data if d['symbol'] == dup_sym) +
 			'\n\nPlease specify the asset by one of the full IDs listed above\n' +
 			f'instead of {dup_sym!r}')

@@ -381,6 +387,73 @@ def gen_data(data):
 	if error:
 		die(1, 'Missing data, exiting')

+	class process_data:
+
+		def cc():
+			nonlocal btcusd
+			for d in data['cc'].data:
+				if d['id'] == 'btc-bitcoin':
+					btcusd = Decimal(str(d['quotes']['USD']['price']))
+					break
+			else:
+				raise ValueError('malformed cryptocurrency data')
+			for k in ('id', 'symbol'):
+				for d in data['cc'].data:
+					if wants[k]:
+						if d[k] in wants[k]:
+							if d[k] in found[k]:
+								die(1, dup_sym_errmsg('cc', d[k]))
+							if not 'price_usd' in d:
+								d['price_usd'] = Decimal(str(d['quotes']['USD']['price']))
+								d['price_btc'] = Decimal(str(d['quotes']['USD']['price'])) / btcusd
+								d['percent_change_24h'] = d['quotes']['USD']['percent_change_24h']
+								d['percent_change_7d'] = d['quotes']['USD']['percent_change_7d']
+								d['percent_change_30d'] = d['quotes']['USD']['percent_change_30d']
+								d['percent_change_1y'] = d['quotes']['USD']['percent_change_1y']
+								d['last_updated'] = int(datetime.datetime.fromisoformat(
+									d['last_updated']).timestamp())
+							yield (d['id'], d)
+							found[k].add(d[k])
+							wants[k].remove(d[k])
+							if d[k] in usr_rate_assets_want[k]:
+								rate_assets[d['symbol']] = d # NB: using symbol instead of ID for key
+					else:
+						break
+
+		def fi():
+			get_id = src_cls['fi'].get_id
+			conv_func = src_cls['fi'].conv_data
+			for k, v in data['fi'].data.items():
+				id = get_id(k, v)
+				if wants['id']:
+					if id in wants['id']:
+						if not isinstance(v, dict):
+							die(2, str(v))
+						if id in found['id']:
+							die(1, dup_sym_errmsg('fi', id))
+						if hist := hist_close.get(k):
+							spot = v['regularMarketPrice']['raw']
+							v['pct_chg_1wk'] = (spot / hist.close_1wk - 1) * 100
+							v['pct_chg_4wks'] = (spot / hist.close_4wks - 1) * 100 # 4 weeks ≈ 1 month
+							v['pct_chg_1y'] = (spot / hist.close_1y - 1) * 100
+						else:
+							v['pct_chg_1wk'] = v['pct_chg_4wks'] = v['pct_chg_1y'] = None
+						yield (id, conv_func(id, v, btcusd))
+						found['id'].add(id)
+						wants['id'].remove(id)
+						if id in usr_rate_assets_want['id']:
+							rate_assets[k] = conv_func(id, v, btcusd) # NB: using symbol instead of ID for key
+				else:
+					break
+
+		def hi():
+			ret = namedtuple('historical_closing_prices', ['close_1wk', 'close_4wks', 'close_1y'])
+			nonlocal hist_close
+			for k, v in data['hi'].data.items():
+				hist = tuple(v.values())
+				hist_close[k] = ret(hist[-2]['close'], hist[-5]['close'], hist[0]['close'])
+			return ()
+
 	rows_want = {
 		'id': {r.id for r in cfg.rows if isinstance(r, tuple) and r.id} - {'usd-us-dollar'},
 		'symbol': {r.symbol for r in cfg.rows if isinstance(r, tuple) and r.id is None} - {'USD'}}
@@ -403,62 +476,25 @@ def gen_data(data):

 	wants = {k: rows_want[k] | usr_wants[k] for k in ('id', 'symbol')}

-	for d in data['cc']:
-		if d['id'] == 'btc-bitcoin':
-			btcusd = Decimal(str(d['quotes']['USD']['price']))
-			break
-
-	get_id = src_cls['fi'].get_id
-	conv_func = src_cls['fi'].conv_data
-
-	for k, v in data['fi'].items():
-		id = get_id(k, v)
-		if wants['id']:
-			if id in wants['id']:
-				if not isinstance(v, dict):
-					die(2, str(v))
-				if id in found['id']:
-					die(1, dup_sym_errmsg(id))
-				if m := data['hi'].get(k):
-					spot = v['regularMarketPrice']['raw']
-					hist = tuple(m.values())
-					v['pct_chg_1wk'], v['pct_chg_4wks'], v['pct_chg_1y'] = (
-						(spot / hist[-2]['close'] - 1) * 100,
-						(spot / hist[-5]['close'] - 1) * 100, # 4 weeks ≈ 1 month
-						(spot / hist[0]['close'] - 1) * 100)
-				else:
-					v['pct_chg_1wk'] = v['pct_chg_4wks'] = v['pct_chg_1y'] = None
-				yield (id, conv_func(id, v, btcusd))
-				found['id'].add(id)
-				wants['id'].remove(id)
-				if id in usr_rate_assets_want['id']:
-					rate_assets[k] = conv_func(id, v, btcusd) # NB: using symbol instead of ID for key
-		else:
-			break
-
-	for k in ('id', 'symbol'):
-		for d in data['cc']:
-			if wants[k]:
-				if d[k] in wants[k]:
-					if d[k] in found[k]:
-						die(1, dup_sym_errmsg(d[k]))
-					if not 'price_usd' in d:
-						d['price_usd'] = Decimal(str(d['quotes']['USD']['price']))
-						d['price_btc'] = Decimal(str(d['quotes']['USD']['price'])) / btcusd
-						d['percent_change_24h'] = d['quotes']['USD']['percent_change_24h']
-						d['percent_change_7d'] = d['quotes']['USD']['percent_change_7d']
-						d['percent_change_30d'] = d['quotes']['USD']['percent_change_30d']
-						d['percent_change_1y'] = d['quotes']['USD']['percent_change_1y']
-						# .replace('Z','+00:00') -- Python 3.9 backport
-						d['last_updated'] = int(datetime.datetime.fromisoformat(
-							d['last_updated'].replace('Z', '+00:00')).timestamp())
-					yield (d['id'], d)
-					found[k].add(d[k])
-					wants[k].remove(d[k])
-					if d[k] in usr_rate_assets_want[k]:
-						rate_assets[d['symbol']] = d # NB: using symbol instead of ID for key
+	btcusd = Decimal('1') # dummy
+	hist_close = {}
+
+	parse_fail = False
+	for data_type in ('cc', 'hi', 'fi'): # 'fi' depends on 'cc' and 'hi' so must go last
+		if data_type in data:
+			try:
+				yield from getattr(process_data, data_type)()
+			except Exception as e:
+				rmsg(f'Error in source data {data_type!r}: {e}')
+				parse_fail = True
 			else:
-				break
+				cache_data(data[data_type])
+
+	if parse_fail:
+		die(2, 'Invalid data encountered, exiting')
+
+	if gcfg.download:
+		return

 	check_assets_found(usr_wants, found)

@@ -485,6 +521,18 @@ def gen_data(data):
 			'price_btc': Decimal(1) / btcusd,
 			'last_updated': None})

+def cache_data(data_src, no_overwrite=False):
+	if data_src.json_text:
+		if os.path.exists(data_src.json_fn):
+			if no_overwrite:
+				return False
+			os.rename(data_src.json_fn, data_src.json_fn + '.bak')
+		with open(data_src.json_fn, 'w') as fh:
+			fh.write(data_src.json_text)
+		if not cfg.quiet:
+			msg(f'JSON data cached to {data_src.json_fn_disp}')
+	return True
+
 def main():

 	def update_sample_file(usr_cfg_file):
@@ -525,7 +573,7 @@ def main():
 		return

 	if gcfg.list_ids:
-		do_pager('\n'.join(e['id'] for e in src_data['cc']))
+		do_pager('\n'.join(e['id'] for e in src_data['cc'].data))
 		return

 	global now
@@ -533,6 +581,9 @@ def main():

 	data = dict(gen_data(src_data))

+	if gcfg.download:
+		return
+
 	(do_pager if cfg.pager else Msg_r)(
 		'\n'.join(getattr(Ticker, cfg.clsname)(data).gen_output()) + '\n')

@@ -695,6 +746,11 @@ def make_cfg(gcfg_arg):
 	cmd_args = gcfg._args
 	cfg_in = get_cfg_in()

+	if gcfg.test_suite: # required for testing with overlay
+		from . import Ticker as this_mod
+		this_mod.src_cls = src_cls
+		this_mod.cfg_in = cfg_in
+
 	usr_rows = parse_usr_asset_arg('add_rows')
 	usr_columns = parse_usr_asset_arg('add_columns', use_cf_file=True)
 	query = parse_query_arg(cmd_args[0]) if cmd_args else None