Crypto, forex and weather API caller refactored
This commit is contained in:
parent 24e4e890ef
commit 24a1bc882f

api_caller.py (447 lines changed)
@@ -27,56 +27,6 @@ def getInput(Block=False):
    msg = ''
    return msg


def readJSON(file_path, max_stocks):

    stock_settings = json.load(open(file_path, 'r'))

    stock_info = stock_settings['symbols']
    symbols = list(stock_info.keys())

    return symbols, stock_info


def readCryptoJSON(file_path, max_crypto):

    symbols = []
    names = []

    all_crypto_settings = json.load(open(file_path, 'r'))

    i = 0
    unique_bases = []
    crypto_info = all_crypto_settings['symbols']

    crypto_info = {}
    for row in CSV:

        if i < max_crypto:

            i += 1

            try:
                symbol, name, base, current_price, opening_price = row
                symbols.append(symbol)
                names.append(name)
                coin_info[name] = [symbol, base, current_price, opening_price]
                if base not in unique_bases:
                    unique_bases.append(base)
            except:
                symbol, name, base = row
                if base not in unique_bases:
                    unique_bases.append(base)
                symbols.append(symbol)
                names.append(name)
                coin_info[name] = [symbol, base]
        else:

            break

    f.close()

    return names, coin_info, unique_bases


def emptyInfo(symbols, stock_info):
    update = False
    for symbol in symbols:
@@ -90,7 +40,7 @@ def updateUpdate(NY_time):
    f.write(NY_str + '\n')
    f.close()


def updateStockPricesFinhubb():
def updateStocksFinhubb():
    max_stocks = 200
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g" #Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80" #Finnhub
@@ -124,14 +74,18 @@ def updateStockPricesFinhubb():
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))


def updateStockPrices():
def updateStocks():

    iexAPIkey = 'pk_d066d39789bd41caac209bca850a35db' #IEX

    max_stocks = 200
    symbols, stock_info = readJSON('csv/stocks_settings.json', max_stocks)

    f = open('csv/stocks_settings.json', 'r')
    all_stocks_settings = json.load(f)
    f.close()
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())

    try:
@@ -174,15 +128,11 @@ def updateStockPrices():

            stock_info[symbol] = {'current': current_prices[i], 'opening': opening_prices[i]}

        f = open('csv/stocks_settings.json', 'r')
        all_stocks_settings = json.load(f)
        f.close()

        all_stocks_settings['symbols'] = stock_info

        json.dump(all_stocks_settings, open('csv/stocks_settings.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
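Across stocks, crypto and forex the refactor settles on the same read-modify-write cycle over each settings JSON: load the whole file, replace only the 'symbols' dict, write it back. A minimal sketch of that pattern, not part of the diff (the helper name update_settings is hypothetical; the file name and structure are taken from the hunks above and below):

import json

def update_settings(path, new_symbol_info):
    # Load the full settings file so user preferences (speed, animation,
    # title, ...) survive the update untouched.
    with open(path, 'r') as f:
        settings = json.load(f)

    # Only the per-symbol price data is refreshed.
    settings['symbols'] = new_symbol_info

    with open(path, 'w+') as f:
        json.dump(settings, f)

# e.g. update_settings('csv/stocks_settings.json',
#                      {'MSFT': {'current': 299.35, 'opening': 298.21}})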
@@ -195,34 +145,120 @@ def updateStockPrices():

def updateCrypto():
    coingecko_client = CoinGeckoAPI()
    symbol_base, coin_info = readJSON('csv/crypto_settings.json', max_crypto)

    f = open('csv/crypto_settings.json', 'r')
    all_crypto_settings = json.load(f)
    f.close()

    coin_info = all_crypto_settings['symbols']
    symbol_base = list(coin_info.keys())

    symbols = [sb.split(',')[0] for sb in symbol_base]
    bases = [sb.split(',')[1] for sb in symbol_base]
    unique_bases = list(set(bases))
    coins = []
    #coins['symbol, base'].keys() = ['name','current','24hr change']

    orderd_syms = [] # so that ymbols and coins in same order
    # convert symbols to ids for coingecko
    for coin in coingecko_client.get_coins_list():
        if coin['symbol'].upper() in symbols:
            ordered_syms.append(coin['symbol'].upper())
            if coin['id'] != 'binance-peg-cardano': # hackaround for two coins with symbol ada
                coins.append(coin['id'])
    coins = []

    # coingecko rate limited me from calling this too often
    #coin_list = coingecko_client.get_coins_list()
    #json.dump(coin_list, open('csv/coin_list.json', 'w+'))

    f = open('csv/coin_list.json', 'r')
    coin_list = json.load(f)
    f.close()

    # this might be super slow as coin_list is large
    for s in symbols:
        for c in coin_list:
            if c['symbol'].upper() == s and c['id'] != 'binance-peg-cardano': # hackaround for two coins with symbol ada:
                coins.append(c['id'])

    crypto_info = {}
    print(coins)

    #cypto_info['symbol, base'].keys() = ['current','24hr change']
    try:
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies = unique_bases, include_24hr_change=True)

        #print(response)

        for coin in coins:
            #coin_info[name] = [symbol, base, current_price, opening_price]

        for i,sb in enumerate(symbol_base):
            #coin_info[name] = [symbol, base]
            #info = coin_info[coin]
            #CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' +str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
            crypto_info[sb] = {'current':response[coins[i]][bases[i].lower()], '24hr_change':response[coins[i]]['usd_24h_change']}

            CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' +str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
        CSV.close()
        all_crypto_settings['symbols'] = crypto_info

        json.dump(all_crypto_settings, open('csv/crypto_settings.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
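For reference, a minimal standalone sketch of the pycoingecko call the new updateCrypto is built around. The coin ids, currencies and variable names here are illustrative, not taken from coin_list.json:

from pycoingecko import CoinGeckoAPI

cg = CoinGeckoAPI()

# ids are CoinGecko coin ids; vs_currencies are the quote ("base") currencies.
response = cg.get_price(ids='bitcoin,cardano',
                        vs_currencies=['usd', 'gbp'],
                        include_24hr_change=True)

# The response is keyed by coin id, with one price per requested currency and
# change keys following the '<currency>_24h_change' pattern, roughly:
# {'bitcoin': {'usd': 42257, 'usd_24h_change': -6.37, 'gbp': ..., 'gbp_24h_change': ...}, ...}
btc_price = response['bitcoin']['usd']
btc_change = response['bitcoin']['usd_24h_change']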
def updateForex():

    f = open('csv/forex_settings.json', 'r')
    all_forex_settings = json.load(f)
    f.close()

    forex_info = all_forex_settings['symbols']
    symbol_base = list(forex_info.keys())

    symbols = [sb.split(',')[0] for sb in symbol_base]
    bases = [sb.split(',')[1] for sb in symbol_base]
    unique_bases = list(set(bases))

    all_responses = []

    # get timeseries from two days ago until today in case it hasnt updated for today yet
    yesterday = datetime.now() - timedelta(1)
    yesteryesterday = datetime.now() - timedelta(2)

    str_tod = datetime.strftime(datetime.now(), '%Y-%m-%d')
    str_yest = datetime.strftime(yesterday, '%Y-%m-%d')
    str_yestyest = datetime.strftime(yesteryesterday, '%Y-%m-%d')

    try:
        for base in unique_bases:

            url = 'https://api.frankfurter.app/{}..{}?from={}'.format(str_yestyest, str_tod, base)
            r = requests.get(url)
            all_data = r.json()
            all_responses.append(all_data)

        c_dict = {}

        for i,curr in enumerate(symbols):

            for response in all_responses:
                if response['base'] == bases[i]:
                    print(response['rates'])
                    try:
                        current = response['rates'][str_tod][curr]
                        yesterday = response['rates'][str_yest][curr]
                    except KeyError:
                        # if it hasnt been updated for today yet use yesterdays price
                        current = response['rates'][str_yest][curr]
                        yesterday = response['rates'][str_yestyest][curr]

                    change = current - yesterday

                    c_dict[symbol_base[i]] = {'current':current, '24hr_change':change}

        all_forex_settings['symbols'] = c_dict
        json.dump(all_forex_settings, open( "csv/forex_settings.json", 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
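A minimal sketch of the Frankfurter time-series request updateForex builds, with hard-coded dates and currencies for illustration (the real code derives the dates from datetime.now() and loops over every unique base):

import requests

# Rates for each day in the range, quoted against the given base currency.
url = 'https://api.frankfurter.app/2021-09-19..2021-09-21?from=USD'
data = requests.get(url).json()

# data['base'] == 'USD'; data['rates'] maps each date to {currency: rate}, e.g.
# {'2021-09-20': {'NZD': ..., 'GBP': ...}, '2021-09-21': {...}}
rates = data['rates']
current = rates.get('2021-09-21', rates['2021-09-20'])['NZD']   # fall back if today is missing
previous = rates['2021-09-20']['NZD']
change = current - previous   # stored as '24hr_change' in forex_settings.json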
@@ -279,17 +315,25 @@ def updateWeather():
    try:
        gn = geocoders.GeoNames(username='fintic')

        f = open( "csv/weather_location.txt", 'r' )
        line = next(f)
        locations = line.split(',')
        f = open('csv/daily_weather.json', 'r')
        all_daily_settings = json.load(f)
        f.close()

        current_weathers = []
        daily_weathers = []
        f = open('csv/daily_weather.json', 'r')
        all_current_settings = json.load(f)
        f.close()

        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())

        all_locations = list(set(current_locations + daily_locations))

        for location in locations:
        current_weathers = {}
        daily_weathers = {}

        for location in all_locations:
            loc = gn.geocode(location)
@@ -318,8 +362,8 @@ def updateWeather():
            current_weather['rain_chance'] = r.json()['hourly'][0]['pop']

            current_weathers.append(current_weather)
            if location in current_locations:
                current_weathers[location] = current_weather

            daily_weather = []
            daily = r.json()['daily']
@@ -331,13 +375,18 @@ def updateWeather():
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)

            daily_weathers.append(daily_weather)

            if location in daily_locations:
                daily_weathers[location] = daily_weather

        json.dump( current_weathers, open( "csv/current_weather.json", 'w+' ))
        json.dump( daily_weathers, open( "csv/daily_weather.json", 'w+' ))
        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers

        json.dump( all_current_settings, open( "csv/current_weather.json", 'w+' ))
        json.dump( all_daily_settings, open( "csv/daily_weather.json", 'w+' ))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
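The request that produces r.json() sits outside these hunks; judging by the 'hourly', 'daily' and 'pop' fields it parses, it is consistent with the OpenWeatherMap One Call endpoint. A hedged sketch under that assumption (OWM_KEY is a placeholder, not a value from the diff):

import requests
from geopy import geocoders

gn = geocoders.GeoNames(username='fintic')
loc = gn.geocode('London')   # gives loc.latitude / loc.longitude

url = ('https://api.openweathermap.org/data/2.5/onecall'
       '?lat={}&lon={}&units=metric&appid={}').format(loc.latitude, loc.longitude, 'OWM_KEY')
r = requests.get(url)

rain_chance = r.json()['hourly'][0]['pop']        # probability of precipitation
today_min = r.json()['daily'][0]['temp']['min']   # forecast low for day 0
today_max = r.json()['daily'][0]['temp']['max']   # forecast high for day 0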
@@ -348,35 +397,45 @@ def updateWeather():
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))


def updateForex():

def updateLeagueTables(api_key, league_ids):

    f = open('csv/league_tables.json', 'r')
    all_settings = json.load(f)
    f.close()

    leagues = all_settings['leagues'].keys()
    leagues_info = {}
    try:
        base = 'USD'
        yesterday = datetime.now() - timedelta(1)

        str_tod = datetime.strftime(datetime.now(), '%Y-%m-%d')
        str_yest = datetime.strftime(yesterday, '%Y-%m-%d')

        url = 'https://api.frankfurter.app/{}..{}?from={}'.format(str_yest, str_tod, base)
        r = requests.get(url)
        all_data = r.json()

        currencies = ['AUD', 'CAD', 'CHF', 'EUR', 'GBP', 'JPY', 'NZD']

        c_dict = {}

        for curr in currencies:
        for league in leagues:
            league_id = league_ids[league]
            url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

            current = all_data['rates'][str_tod][curr]
            yesterday = all_data['rates'][str_yest][curr]
            r = requests.get(url)
            try:
                all_data = r.json()
            except: # there is no data available
                continue
            teams = []

            change = current - yesterday

            c_dict[curr] = [current, yesterday]

        json.dump([base, c_dict], open( "csv/currency.json", 'w+' ))
            for i in range(len(all_data['table'])):
                team = {}

                team['name'] = all_data['table'][i]['strTeam']
                team['wins'] = all_data['table'][i]['intWin']
                team['loss'] = all_data['table'][i]['intLoss']
                team['draw'] = all_data['table'][i]['intDraw']
                team['played'] = all_data['table'][i]['intPlayed']
                team['standing'] = all_data['table'][i]['intRank']
                team['points'] = all_data['table'][i]['intPoints']

                teams.append(team)
            leagues_info[league] = teams
        all_settings['leagues'] = leagues_info
        json.dump(all_settings, open( "csv/league_tables.json".format(league), 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
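A minimal sketch of the lookuptable.php request that updateLeagueTables issues once per configured league. The api_key value is a placeholder here; the league id is one of the ids used elsewhere in this diff:

import requests

api_key = 'YOUR_SPORTSDB_KEY'   # placeholder
league_id = '4328'              # Premier League, per the ids in updateSports

url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)
table = requests.get(url).json()['table']

# Keep the same per-team fields the refactor stores in league_tables.json.
teams = [{'name': row['strTeam'], 'played': row['intPlayed'],
          'points': row['intPoints'], 'standing': row['intRank']}
         for row in table]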
@@ -387,101 +446,60 @@ def updateForex():
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))


def updateLeagueTable(api_key, league_id):
    try:
        url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

        r = requests.get(url)
        all_data = r.json()

        premier_teams = []

        for i in range(len(all_data['table'])):
            team = {}

            team['name'] = all_data['table'][i]['strTeam']
            team['wins'] = all_data['table'][i]['intWin']
            team['loss'] = all_data['table'][i]['intLoss']
            team['draw'] = all_data['table'][i]['intDraw']
            team['played'] = all_data['table'][i]['intPlayed']
            team['standing'] = all_data['table'][i]['intRank']
            team['points'] = all_data['table'][i]['intPoints']

            premier_teams.append(team)

        if league_id == '4328':
            league = 'premier_league'
        elif league_id == '4380':
            league = 'NHL'
        elif league_id == '4387':
            league = 'NBA'
        elif league_id == '4391':
            league = 'NFL'

        json.dump(premier_teams, open( "csv/sports/{}/team_stats.json".format(league), 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))


def updateLeagueEvents(api_key, league_id, time):
def updateLeagueEvents(api_key, league_ids, time):

    if time == 'past':
        url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}' #last 15 events on the league (premium only)
        f = open('csv/past_games.json')
    elif time == 'upcoming':
        url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}' #next 15 events on the league (premium only)
        f = open('csv/upcoming_games.json')
    elif time == 'live':
        f = open('csv/live_games.json')
        url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'

    all_settings = json.load(f)
    f.close()
    leagues = all_settings['leagues'].keys()
    leagues_info = {}
    try:
        if time == 'past':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id) #last 15 events on the league (premium only)
        elif time == 'future':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id) #next 15 events on the league (premium only)
        elif time == 'live':
            url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)

        r = requests.get(url)
        all_data = r.json()

        events = []

        if not all_data['events'] is None:
        for league in leagues:
            league_id = league_ids[league]
            url = url.format(api_key, league_id)

            for i in range(len(all_data['events'])):
                event = {}
                event['date'] = all_data['events'][i]['dateEvent']

                if time == 'live':
                    event['time'] = all_data['events'][i]['strEventTime']
                    event['progess'] = all_data['events'][i]['strProgress']
                    event['status'] = all_data['events'][i]['strStatus']
                else:
                    event['time'] = all_data['events'][i]['strTime']
                    event['round'] = all_data['events'][i]['intRound']
                event['home_team'] = all_data['events'][i]['strHomeTeam']
                event['home_score'] = all_data['events'][i]['intHomeScore']
                event['away_team'] = all_data['events'][i]['strAwayTeam']
                event['away_score'] = all_data['events'][i]['intAwayScore']

                events.append(event)
            r = requests.get(url)
            try:
                all_data = r.json()
            except: # there is no data available
                continue

            events = []

            if not all_data['events'] is None:

                for i in range(len(all_data['events'])):
                    event = {}
                    event['date'] = all_data['events'][i]['dateEvent']

                    if time == 'live':
                        event['time'] = all_data['events'][i]['strEventTime']
                        event['progess'] = all_data['events'][i]['strProgress']
                        event['status'] = all_data['events'][i]['strStatus']
                    else:
                        event['time'] = all_data['events'][i]['strTime']
                        event['round'] = all_data['events'][i]['intRound']
                    event['home_team'] = all_data['events'][i]['strHomeTeam']
                    event['home_score'] = all_data['events'][i]['intHomeScore']
                    event['away_team'] = all_data['events'][i]['strAwayTeam']
                    event['away_score'] = all_data['events'][i]['intAwayScore']

                    events.append(event)
            leagues_info[league] = events
        all_settings['leagues'] = leagues_info

        if league_id == '4328':
            league = 'premier_league'
        elif league_id == '4380':
            league = 'NHL'
        elif league_id == '4387':
            league = 'NBA'
        elif league_id == '4391':
            league = 'NFL'

        json.dump(events, open( "csv/sports/{}/{}_games.json".format(league, time), 'w+' ))
        json.dump(all_settings, open( "csv/{}_games.json".format(time), 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
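For orientation, the time argument now selects both the settings file and the TheSportsDB endpoint; the mapping below just restates what the hunk hard-codes (the ENDPOINTS name itself is hypothetical):

# url templates still need .format(api_key, league_id) before the request
ENDPOINTS = {
    'past':     ('csv/past_games.json',
                 'https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'),
    'upcoming': ('csv/upcoming_games.json',
                 'https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'),
    'live':     ('csv/live_games.json',
                 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'),
}

settings_file, url_template = ENDPOINTS['live']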
@@ -496,19 +514,16 @@ def updateSports():
    #read user settings to decide which sprots to update
    api_key = '97436974'

    prem_id = '4328' #prem
    NHL_id = '4380'
    NBA_id = '4387' #prem
    NFL_id = '4391'

    for i in [NHL_id, prem_id]:
        updateLeagueEvents(api_key, i, 'live')
        updateLeagueEvents(api_key, i, 'past')
        updateLeagueEvents(api_key, i, 'future')

    league_ids = {'Premier League':'4328', 'NHL':'4380', 'NBA':'4387', 'NFL':'4391'}
    updateLeagueTables(api_key, league_ids)

    updateLeagueEvents(api_key, league_ids, 'live')
    updateLeagueEvents(api_key, league_ids, 'past')
    updateLeagueEvents(api_key, league_ids, 'upcoming')

    updateLeagueTable(api_key, prem_id)

    'https://www.thesportsdb.com/api/v1/json/{}/eventsnext.php?id=133602'.format(api_key) # next five events by team ID (paid) use this for upcoming team games
@@ -533,12 +548,12 @@ def checkStocks(last_update, update_frequency):

    if diff >= update_frequency:
        updated = True
        updateStockPrices()
        updateStocks()

    elif emptyInfo(symbols, stock_info): # if theres any empty stocks
        updated = True
        updateStockPrices()
        updateStocks()

    else:
@@ -549,7 +564,7 @@ def checkStocks(last_update, update_frequency):

        if last_update < yday_closing:
            updated = True
            updateStockPrices()
            updateStocks()

    return updated
@@ -563,7 +578,7 @@ if __name__ == '__main__':
    max_stocks = 200
    max_crypto = 100

    updateCrypto()
    updateSports()
    sys.exit()

    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')
@@ -1 +1 @@
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": false, "point": true, "logos": true, "chart": false, "title": true, "symbols": {"NEO,USD": -1, "BTC,USD": -1, "ETH,BTC": -1, "ADA,GBP": -1}}
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": false, "point": true, "logos": true, "chart": false, "title": true, "symbols": {"NEO,USD": {"current": 38.91, "24hr_change": -10.214531272448745}, "BTC,USD": {"current": 42257, "24hr_change": -6.370431167988443}, "ETH,BTC": {"current": 0.06787408, "24hr_change": -7.725740924537157}, "ADA,GBP": {"current": 1.69, "24hr_change": -0.5989543849163771}}}
@@ -1 +1 @@
{"feature": "Current Weather", "speed": "medium", "animation": "continuous", "temp": "celsius", "wind_speed": "miles/sec", "colour": "white", "city_colour": "yellow", "title": true, "cities": {"London": {"main_weather": "Rain", "description": "light rain", "temp": 29.71, "min_temp": 27.67, "max_temp": 29.71, "feels_like": 36.71, "humidity": 83, "clouds": 38, "wind_speed": 3.9, "wind_direction": 126, "visibility": 10000, "uv": 0, "rain_chance": 0.51}, "Hong Kong": -1}}
{"feature": "Current Weather", "speed": "medium", "animation": "continuous", "temp": "celsius", "wind_speed": "miles/sec", "colour": "white", "city_colour": "yellow", "title": true, "locations": {"Moscow": {"main_weather": "Rain", "description": "light rain", "temp": 9.49, "min_temp": 7.65, "max_temp": 10.51, "feels_like": 7.58, "humidity": 84, "clouds": 100, "wind_speed": 3.59, "wind_direction": 183, "visibility": 10000, "uv": 0.78, "rain_chance": 0.2}, "London": {"main_weather": "Clouds", "description": "overcast clouds", "temp": 16.83, "min_temp": 16.43, "max_temp": 21.14, "feels_like": 16.97, "humidity": 92, "clouds": 90, "wind_speed": 1.54, "wind_direction": 0, "visibility": 10000, "uv": 1.35, "rain_chance": 0}, "Beijing": {"main_weather": "Clouds", "description": "overcast clouds", "temp": 22.72, "min_temp": 18.1, "max_temp": 22.92, "feels_like": 22.69, "humidity": 63, "clouds": 99, "wind_speed": 2.06, "wind_direction": 185, "visibility": 10000, "uv": 0.28, "rain_chance": 0.06}, "Tokyo": {"main_weather": "Clouds", "description": "scattered clouds", "temp": 22, "min_temp": 21.23, "max_temp": 24.98, "feels_like": 22.16, "humidity": 73, "clouds": 40, "wind_speed": 1.34, "wind_direction": 52, "visibility": 10000, "uv": 0, "rain_chance": 0.36}}, "current_weather": true}
@@ -1 +1 @@
{"feature": "Current Weather", "speed": "medium", "animation": "continuous", "temp": "celsius", "wind_speed": "miles/sec", "colour": "white", "city_colour": "yellow", "title": true, "cities": {"London": [{"main_weather": "Rain", "description": "light rain", "min_temp": 27.67, "max_temp": 29.71}, {"main_weather": "Clear", "description": "clear sky", "min_temp": 27.74, "max_temp": 30.05}, {"main_weather": "Rain", "description": "light rain", "min_temp": 28.06, "max_temp": 29.8}, {"main_weather": "Rain", "description": "moderate rain", "min_temp": 28.05, "max_temp": 29.34}, {"main_weather": "Rain", "description": "light rain", "min_temp": 27.91, "max_temp": 29.29}, {"main_weather": "Clouds", "description": "few clouds", "min_temp": 27.69, "max_temp": 29.03}, {"main_weather": "Rain", "description": "light rain", "min_temp": 27.56, "max_temp": 29.03}, {"main_weather": "Rain", "description": "light rain", "min_temp": 27.64, "max_temp": 29.29}], "Moscow": -1, "Tokyo": -1, "Beijing": -1}, "current_weather": true}
{"feature": "Current Weather", "speed": "medium", "animation": "continuous", "temp": "celsius", "wind_speed": "miles/sec", "colour": "white", "city_colour": "yellow", "title": true, "locations": {"Moscow": [{"main_weather": "Rain", "description": "light rain", "min_temp": 7.65, "max_temp": 10.51}, {"main_weather": "Rain", "description": "light rain", "min_temp": 8.46, "max_temp": 9.36}, {"main_weather": "Rain", "description": "light rain", "min_temp": 7.77, "max_temp": 10.32}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 6.71, "max_temp": 8.53}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 4.13, "max_temp": 9.61}, {"main_weather": "Clear", "description": "clear sky", "min_temp": 5.87, "max_temp": 12.32}, {"main_weather": "Clouds", "description": "few clouds", "min_temp": 5.82, "max_temp": 12.78}, {"main_weather": "Clouds", "description": "scattered clouds", "min_temp": 6.06, "max_temp": 13.3}], "London": [{"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 16.43, "max_temp": 21.14}, {"main_weather": "Clouds", "description": "few clouds", "min_temp": 14.49, "max_temp": 22.91}, {"main_weather": "Rain", "description": "moderate rain", "min_temp": 13.23, "max_temp": 18.43}, {"main_weather": "Rain", "description": "moderate rain", "min_temp": 12.03, "max_temp": 17.55}, {"main_weather": "Rain", "description": "light rain", "min_temp": 8.64, "max_temp": 15.93}, {"main_weather": "Rain", "description": "light rain", "min_temp": 11.89, "max_temp": 17.45}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 12.55, "max_temp": 18.84}, {"main_weather": "Rain", "description": "moderate rain", "min_temp": 10.94, "max_temp": 15.26}], "Beijing": [{"main_weather": "Rain", "description": "light rain", "min_temp": 18.1, "max_temp": 22.92}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 17.48, "max_temp": 19.87}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 16.77, "max_temp": 25.9}, {"main_weather": "Rain", "description": "light rain", "min_temp": 18.78, "max_temp": 23.9}, {"main_weather": "Clear", "description": "clear sky", "min_temp": 18.28, "max_temp": 28.13}, {"main_weather": "Rain", "description": "light rain", "min_temp": 17.59, "max_temp": 21.57}, {"main_weather": "Rain", "description": "light rain", "min_temp": 17.99, "max_temp": 20.7}, {"main_weather": "Rain", "description": "light rain", "min_temp": 17.22, "max_temp": 26.37}], "Tokyo": [{"main_weather": "Rain", "description": "light rain", "min_temp": 21.23, "max_temp": 24.98}, {"main_weather": "Clouds", "description": "overcast clouds", "min_temp": 20.36, "max_temp": 23.48}, {"main_weather": "Clouds", "description": "few clouds", "min_temp": 19.58, "max_temp": 25.58}, {"main_weather": "Clouds", "description": "scattered clouds", "min_temp": 19.32, "max_temp": 25.64}, {"main_weather": "Clouds", "description": "broken clouds", "min_temp": 19.51, "max_temp": 23.75}, {"main_weather": "Rain", "description": "light rain", "min_temp": 20.69, "max_temp": 23.92}, {"main_weather": "Rain", "description": "light rain", "min_temp": 20.13, "max_temp": 27.1}, {"main_weather": "Clouds", "description": "broken clouds", "min_temp": 20.75, "max_temp": 25.79}]}, "current_weather": true}
@@ -1 +1 @@
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": false, "point": true, "logos": true, "chart": false, "title": true, "symbols": {"NZD,USD": -1, "CAD,USD": -1, "GBP,BTC": -1}}
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": false, "point": true, "logos": true, "chart": false, "title": true, "symbols": {"NZD,GBP": {"current": 1.9471, "24hr_change": 0.0029000000000001247}, "CAD,USD": {"current": 1.271, "24hr_change": 0.0033999999999998476}}}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": true, "point": false, "logos": true, "chart": false, "title": true, "symbols": {"MSFT": {"current": 300.03, "opening": 298.845}, "GOOG": {"current": 2839.75, "opening": 2832}}}
{"feature": "Stocks", "speed": "medium", "animation": "continuous", "percent": true, "point": false, "logos": true, "chart": false, "title": true, "symbols": {"MSFT": {"current": 299.35, "opening": 298.21}, "GOOG": {"current": 2854.185, "opening": 2821.99}}}
@@ -1 +0,0 @@
{"feature": "Sports (Upcoming Games)", "speed": "medium", "animation": "continuous", "title": true, "leagues": {"NFL": -1, "NBA": -1, "NHA": -1, "Premier League": -1}}
File diff suppressed because one or more lines are too long
log.txt (5 lines changed)
@@ -1,5 +0,0 @@
'bitcoin'. file: api_caller.py. line: 221. type: <class 'KeyError'>
Traceback (most recent call last):
File "api_caller.py", line 221, in updateCrypto
info = coin_info[coin]
KeyError: 'bitcoin'
@@ -74,7 +74,7 @@ def index():
    upcoming_games = json.load(open('csv/upcoming_games.json', 'r'))
    live_games = json.load(open('csv/live_games.json', 'r'))
    past_games = json.load(open('csv/past_games.json', 'r'))
    team_stats = json.load(open('csv/team_stats.json', 'r'))
    team_stats = json.load(open('csv/league_tables.json', 'r'))
    image_settings = json.load(open('csv/image_settings.json', 'r'))
    GIF_settings = json.load(open('csv/GIF_settings.json', 'r'))
@@ -424,7 +424,7 @@ def save_weather_settings(input_settings):
    current_settings['current_weather'] = input_settings['current_weather']

    current_settings = combine_dict(current_settings, input_settings['cities'], 'cities')
    current_settings = combine_dict(current_settings, input_settings['locations'], 'locations')

    json.dump(current_settings, open('csv/' + filename, 'w+'))
@@ -455,14 +455,14 @@ def save_sports_settings(input_settings):
    elif feature == 'Sports (Live Games)':
        filename = 'live_games.json'
    elif feature == 'Sports (Team Stats)':
        filename = 'team_stats.json'
        filename = 'league_tables.json'

    current_settings = json.load(open('csv/' + filename, 'r'))

    current_settings['speed'] = input_settings['speed'].lower()
    current_settings['animation'] = input_settings['animation'].lower()
    current_settings['title'] = input_settings['title']

    current_settings['feature'] = input_settings['feature']

    current_settings = combine_dict(current_settings, input_settings['leagues'], 'leagues')
@@ -729,9 +729,9 @@ function getWeatherSettings(page) {
  }

  let cities_el = page.querySelectorAll(".city-list")[0];
  let cities = getListItems(cities_el);
  settings['cities'] = cities;
  let locations_el = page.querySelectorAll(".location-list")[0];
  let locations = getListItems(locations_el);
  settings['locations'] = locations;

  return settings;
}
@@ -1000,9 +1000,9 @@
<div class="features-div-two">
  <ul
    id="current-weather-features"
    class="display-features-list text-dark city-list"
    class="display-features-list text-dark location-list"
  >
    {% for f in current_weather.cities.keys() %}
    {% for f in current_weather.locations.keys() %}
    <li>{{f}}</li>
    {% endfor%}
  </ul>
@@ -1204,9 +1204,9 @@
<div class="features-div-two">
  <ul
    id="daily-forecast-features"
    class="display-features-list text-dark city-list"
    class="display-features-list text-dark location-list"
  >
    {% for f in daily_weather.cities.keys() %}
    {% for f in daily_weather.locations.keys() %}
    <li>{{f}}</li>
    {% endfor%}
  </ul>