weather api calls consolidated and multiple cities can be used
api_caller.py (367 changed lines)
@@ -9,6 +9,8 @@ import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders

def getInput(Block=False):
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
@@ -46,7 +48,7 @@ def readCSV(file_path, max_stocks):

    return symbols, stock_info

def readCryptoCSV(file_path, max_stocks):
def readCryptoCSV(file_path, max_crypto):

    symbols = []
    names = []
@@ -58,23 +60,24 @@ def readCryptoCSV(file_path, max_stocks):
    unique_bases = []
    for row in CSV:
        print(row)
        if i < max_stocks:
            i += 1

            try:
                symbol, name, base, current_price, opening_price = row
                symbols.append(symbol)
                names.append(name)
                stock_info[name] = [symbol, base, current_price, opening_price]
                if base not in unique_bases:
                    unique_bases.append(base)
            except:
                symbol, name, base = row
                if base not in unique_bases:
                    unique_bases.append(base)
                symbols.append(symbol)
                names.append(name)
                stock_info[name] = [symbol, base]
        if i >= max_stocks:
            break
        i += 1

        try:
            symbol, name, base, current_price, opening_price = row
            symbols.append(symbol)
            names.append(name)
            stock_info[name] = [symbol, base, current_price, opening_price]
            if base not in unique_bases:
                unique_bases.append(base)
        except:
            symbol, name, base = row
            if base not in unique_bases:
                unique_bases.append(base)
            symbols.append(symbol)
            names.append(name)
            stock_info[name] = [symbol, base]
        else:
            print('max stocks exceeded')
            break
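The rewritten loop above replaces the old `if i < max_stocks:` wrapper with an early break, so the row-parsing code sits one level shallower. For illustration, a minimal standalone sketch of the same pattern, assuming the two CSV layouts implied by the tuple unpacking above (five columns with cached prices, three without); the function name and the enumerate-based counter are this sketch's own choices, not the repository's:

import csv

def read_crypto_rows(file_path, max_rows):
    # Early-break version of the loop above: stop before parsing
    # once max_rows rows have been consumed.
    rows = []
    with open(file_path, newline='') as f:
        for i, row in enumerate(csv.reader(f)):
            if i >= max_rows:
                break
            try:
                # five columns: prices already cached
                symbol, name, base, current_price, opening_price = row
            except ValueError:
                # three columns: no prices yet
                symbol, name, base = row
                current_price = opening_price = None
            rows.append((symbol, name, base, current_price, opening_price))
    return rows

Catching ValueError rather than a bare except would keep unrelated errors from being silently folded into the three-column path.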
@@ -120,12 +123,19 @@ def updateStockPrices():
        print('API called successfully')

    except Exception as e:
        print("Could not fetch data - API CALLS REACHED? - Will display old image")

        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
        apiCalledError = True


def updateStockPricesIEX():

    iexAPIkey = 'pk_68ef6a15902c41f887f0b544a0ca17cf' #IEX
    iexSandboxAPIkey = 'Tpk_0078dff413ef4f979137f7111452dc4b'
    max_stocks = 200
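This commit pastes the same eight-line exception-logging block into nearly every updater. A sketch of a helper that would consolidate it; `logf` and the message layout come straight from the diff, while the helper name `log_exception` is hypothetical:

import os, sys, traceback

def log_exception(logf, e):
    # Same fields, same order as the block above: message, file,
    # line, type, then the fully formatted traceback.
    exc_type, exc_obj, exc_tb = sys.exc_info()
    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    logf.write(str(e))
    logf.write('. file: ' + fname)
    logf.write('. line: ' + str(exc_tb.tb_lineno))
    logf.write('. type: ' + str(exc_type))
    logf.write('\n ' + ''.join(traceback.format_exception(exc_type, exc_obj, exc_tb)))
    print(e)

Passing the already-unpacked exc_type, exc_obj, exc_tb triple to traceback.format_exception is equivalent to indexing sys.exc_info() three times, as the diff does.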
@@ -185,6 +195,13 @@ def updateStockPricesIEX():
            CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        CSV.close()
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
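The IEX keys above are committed in plain text. A common alternative, shown here only as a sketch (the environment-variable names are hypothetical), is to load them from the environment:

import os

# Hypothetical variable names; set them in the service's environment
# instead of committing live and sandbox keys to the repository.
iexAPIkey = os.environ.get('IEX_API_KEY', '')
iexSandboxAPIkey = os.environ.get('IEX_SANDBOX_API_KEY', '')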
@@ -205,6 +222,13 @@ def updateCrypto():
            CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' +str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
        CSV.close()
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
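The CSV line above writes symbol, coin id, base currency, price, and 24-hour change, which matches what a single consolidated CoinGecko call can return. A minimal sketch using pycoingecko's get_price; the coin ids and base currencies are example values:

from pycoingecko import CoinGeckoAPI

cg = CoinGeckoAPI()
# One consolidated call covers several coins and base currencies at once.
response = cg.get_price(ids='bitcoin,ethereum', vs_currencies='usd,eur',
                        include_24hr_change=True)
print(response['bitcoin']['usd'], response['bitcoin']['usd_24h_change'])

Batching ids and vs_currencies into one request is what keeps the call count down as the watchlist grows.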
@@ -235,59 +259,87 @@ def updateNews():

        CSV.close()
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
def updateWeather():

    max_cities = 30
    api_key = 'bd5d5096a5ba30bbcfb57ead42ab3fee'
    try:
        gn = geocoders.GeoNames(username='fintic')

        f = open( "csv/weather_location.txt", 'r' )
        location = f.read()
        line = next(f)
        locations = line.split(',')
        f.close()
        url = "https://api.openweathermap.org/data/2.5/weather?q={}&units=metric&appid={}".format(location, api_key)
        r = requests.get(url)
        weather = r.json()
        print(weather)
        current_weather = {}

        coords = weather['coord']
        lat = coords['lat']
        lon = coords['lon']
        url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
        r = requests.get(url)

        current_weather['main_weather'] = weather['weather'][0]['main']
        current_weather['description'] = weather['weather'][0]['description']
        current_weather['temp'] = weather['main']['temp']
        current_weather['min_temp'] = weather['main']['temp_min']
        current_weather['max_temp'] = weather['main']['temp_max']
        current_weather['feels_like'] = weather['main']['feels_like']
        current_weather['humidity'] = weather['main']['humidity']
        current_weather['clouds'] = weather['clouds']['all']
        current_weather['wind_speed'] = weather['wind']['speed']
        current_weather['wind_direction'] = weather['wind']['deg']
        current_weather['visibility'] = weather['visibility']
        current_weather['uv'] = r.json()['current']['uvi']
        current_weather['rain_chance'] = r.json()['hourly'][0]['pop']
        current_weathers = []
        daily_weathers = []

        for location in locations:
            loc = gn.geocode(location)

            current_weather = {}

            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)

            weather = r.json()['current']

            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
            current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = r.json()['hourly'][0]['pop']

            current_weathers.append(current_weather)

            daily_weather = []
            daily = r.json()['daily']

            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)

            daily_weathers.append(daily_weather)

        json.dump( current_weather, open( "csv/current_weather.json", 'w+' ))

        daily_weather = []
        daily = r.json()['daily']

        for day in daily:
            dct = {}
            dct['main_weather'] = day['weather'][0]['main']
            dct['description'] = day['weather'][0]['description']
            dct['min_temp'] = day['temp']['min']
            dct['max_temp'] = day['temp']['max']
            daily_weather.append(dct)

        json.dump( daily_weather, open( "csv/daily_weather.json", 'w+' ))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
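This is the heart of the commit: csv/weather_location.txt now carries a comma-separated list of cities, each one geocoded through GeoNames and fetched from the One Call endpoint inside a single loop, with results accumulated in current_weathers and daily_weathers. A condensed sketch of that flow, assuming a valid GeoNames username and OpenWeatherMap key; the function name and the 'city' field are this sketch's additions:

import requests
from geopy import geocoders

def fetch_weather_for_cities(cities, api_key, username):
    # Geocode each city name once, then query the One Call endpoint
    # with its coordinates; collect one summary dict per city.
    gn = geocoders.GeoNames(username=username)
    url_tmpl = ('https://api.openweathermap.org/data/2.5/onecall'
                '?lat={}&units=metric&lon={}&appid={}')
    summaries = []
    for city in cities:
        loc = gn.geocode(city)
        data = requests.get(url_tmpl.format(loc.latitude, loc.longitude, api_key)).json()
        current = data['current']
        summaries.append({
            'city': city,                                  # sketch-only field
            'main_weather': current['weather'][0]['main'],
            'temp': current['temp'],
            'rain_chance': data['hourly'][0]['pop'],
        })
    return summaries

# Usage, mirroring the diff's file format:
# cities = open('csv/weather_location.txt').readline().split(',')
# print(fetch_weather_for_cities(cities, api_key='...', username='...'))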
@@ -324,96 +376,123 @@ def updateForex():

        json.dump([base, c_dict], open( "csv/currency.json", 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
def updateLeagueTable(api_key, league_id):
    url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

    r = requests.get(url)
    all_data = r.json()

    premier_teams = []

    for i in range(len(all_data['table'])):
        team = {}
    try:
        url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

        if all_data['table'][i]['strTeam'] == "Calgary Flames":
            print(all_data['table'][i]['dateUpdated'], all_data['table'][i]['intPoints'])
        r = requests.get(url)
        all_data = r.json()

        premier_teams = []

        for i in range(len(all_data['table'])):
            team = {}

        team['name'] = all_data['table'][i]['strTeam']
        team['wins'] = all_data['table'][i]['intWin']
        team['loss'] = all_data['table'][i]['intLoss']
        team['draw'] = all_data['table'][i]['intDraw']
        team['played'] = all_data['table'][i]['intPlayed']
        team['standing'] = all_data['table'][i]['intRank']
        team['points'] = all_data['table'][i]['intPoints']
        if all_data['table'][i]['strTeam'] == "Calgary Flames":
            print(all_data['table'][i]['dateUpdated'], all_data['table'][i]['intPoints'])

            team['name'] = all_data['table'][i]['strTeam']
            team['wins'] = all_data['table'][i]['intWin']
            team['loss'] = all_data['table'][i]['intLoss']
            team['draw'] = all_data['table'][i]['intDraw']
            team['played'] = all_data['table'][i]['intPlayed']
            team['standing'] = all_data['table'][i]['intRank']
            team['points'] = all_data['table'][i]['intPoints']

        premier_teams.append(team)

    if league_id == '4328':
        league = 'premier_league'
    elif league_id == '4380':
        league = 'NHL'
    elif league_id == '4387':
        league = 'NBA'
    elif league_id == '4391':
        league = 'NFL'

            premier_teams.append(team)

        if league_id == '4328':
            league = 'premier_league'
        elif league_id == '4380':
            league = 'NHL'
        elif league_id == '4387':
            league = 'NBA'
        elif league_id == '4391':
            league = 'NFL'

    json.dump(premier_teams, open( "csv/sports/{}/team_stats.json".format(league), 'w+' ))
        json.dump(premier_teams, open( "csv/sports/{}/team_stats.json".format(league), 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
def updateLeagueEvents(api_key, league_id, time):

    if time == 'past':
        url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id) #last 15 events on the league (premium only)
    elif time == 'future':
        url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id) #next 15 events on the league (premium only)
    elif time == 'live':
        url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)

    r = requests.get(url)
    all_data = r.json()

    print()
    #print(all_data['events'][0].keys())
    #print([all_data['events'][i]['strTimestamp'] for i in range(len(all_data['events']))])

    events = []

    if not all_data['events'] is None:
    try:
        if time == 'past':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id) #last 15 events on the league (premium only)
        elif time == 'future':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id) #next 15 events on the league (premium only)
        elif time == 'live':
            url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)

        for i in range(len(all_data['events'])):
            event = {}
            event['date'] = all_data['events'][i]['dateEvent']
        r = requests.get(url)
        all_data = r.json()

        print()
        #print(all_data['events'][0].keys())
        #print([all_data['events'][i]['strTimestamp'] for i in range(len(all_data['events']))])

        events = []

        if not all_data['events'] is None:

            if time == 'live':
                event['time'] = all_data['events'][i]['strEventTime']
                event['progess'] = all_data['events'][i]['strProgress']
                event['status'] = all_data['events'][i]['strStatus']
            else:
                event['time'] = all_data['events'][i]['strTime']
                event['round'] = all_data['events'][i]['intRound']
            event['home_team'] = all_data['events'][i]['strHomeTeam']
            event['home_score'] = all_data['events'][i]['intHomeScore']
            event['away_team'] = all_data['events'][i]['strAwayTeam']
            event['away_score'] = all_data['events'][i]['intAwayScore']

            events.append(event)

    if league_id == '4328':
        league = 'premier_league'
    elif league_id == '4380':
        league = 'NHL'
    elif league_id == '4387':
        league = 'NBA'
    elif league_id == '4391':
        league = 'NFL'

    json.dump(events, open( "csv/sports/{}/{}_games.json".format(league, time), 'w+' ))
            for i in range(len(all_data['events'])):
                event = {}
                event['date'] = all_data['events'][i]['dateEvent']

                if time == 'live':
                    event['time'] = all_data['events'][i]['strEventTime']
                    event['progess'] = all_data['events'][i]['strProgress']
                    event['status'] = all_data['events'][i]['strStatus']
                else:
                    event['time'] = all_data['events'][i]['strTime']
                    event['round'] = all_data['events'][i]['intRound']
                event['home_team'] = all_data['events'][i]['strHomeTeam']
                event['home_score'] = all_data['events'][i]['intHomeScore']
                event['away_team'] = all_data['events'][i]['strAwayTeam']
                event['away_score'] = all_data['events'][i]['intAwayScore']

                events.append(event)

            if league_id == '4328':
                league = 'premier_league'
            elif league_id == '4380':
                league = 'NHL'
            elif league_id == '4387':
                league = 'NBA'
            elif league_id == '4391':
                league = 'NFL'

            json.dump(events, open( "csv/sports/{}/{}_games.json".format(league, time), 'w+' ))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
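Both updateLeagueTable and updateLeagueEvents repeat the same league_id-to-name chain. A table-driven sketch of that mapping, using the four TheSportsDB ids visible above; the LEAGUES constant is hypothetical:

# TheSportsDB league ids, as used in both functions above.
LEAGUES = {
    '4328': 'premier_league',
    '4380': 'NHL',
    '4387': 'NBA',
    '4391': 'NFL',
}

league = LEAGUES.get(league_id)  # None instead of an unbound name for unknown ids

A dict lookup also gives a natural place to handle an unrecognized id, which the if/elif chain leaves undefined.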
def updateSports():
    #read user settings to decide which sports to update
@@ -479,7 +558,7 @@ def checkStocks(last_update, update_frequency):


if __name__ == '__main__':

    updateWeather()
    max_stocks = 200
    max_crypto = 100
@@ -548,12 +627,12 @@ if __name__ == '__main__':
            last_updates['weather'] = weather_time

        # weather
        # news
        news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")
        news_frequency = update_frequencies['news']

        NY_time = datetime.now(NY_zone).replace(tzinfo=None)
        diff = (NY_time - weather_time).total_seconds()/60 #minutes
        diff = (NY_time - news_time).total_seconds()/60 #minutes
        if diff >= update_frequencies['news']:
            news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
            updateNews()
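The hunk above fixes a copy-paste slip: the news check now measures elapsed minutes against news_time rather than weather_time. The gating pattern in isolation, with example values standing in for the real last_updates entries:

from datetime import datetime

FMT = "%d/%m/%Y %H:%M:%S"
news_time = datetime.strptime("01/01/2021 09:30:00", FMT)  # example timestamp
news_frequency = 15                                        # minutes, example value
NY_time = datetime.now()

diff = (NY_time - news_time).total_seconds() / 60  # minutes elapsed
if diff >= news_frequency:
    news_time_str = NY_time.strftime(FMT)  # persist the new timestamp
    print('refreshing news...')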
@@ -572,7 +651,13 @@ if __name__ == '__main__':
                updateForex()

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)