# fintic-tracker/api_caller.py
import finnhub
import time
import csv
2021-05-05 19:26:56 +00:00
import pytz
2021-06-09 18:06:21 +00:00
from datetime import datetime, timedelta
2021-05-27 19:10:57 +00:00
import json
2021-05-06 19:59:27 +00:00
import datetime as dt
2021-05-27 19:10:57 +00:00
import sys, os, base64, hashlib, hmac, select
2021-05-08 11:10:05 +00:00
import requests
2021-05-14 12:02:22 +00:00
from pycoingecko import CoinGeckoAPI
2021-05-21 13:24:37 +00:00
from newsapi import NewsApiClient
import traceback
from geopy import geocoders
2021-05-21 13:24:37 +00:00
2021-05-27 19:10:57 +00:00
def getInput(Block=False):
    """Return one character from stdin, or '' if nothing is pending.

    When Block is True, read unconditionally (and block until a character
    arrives); otherwise poll stdin with a zero-timeout select() first and
    return '' when no input is ready.
    """
    if not Block:
        ready = select.select([sys.stdin], [], [], 0)
        if ready != ([sys.stdin], [], []):
            return ''
    return sys.stdin.read(1)
2021-05-05 19:26:56 +00:00
2021-05-14 12:02:22 +00:00
def readCSV(file_path, max_stocks):
    """Read a ticker CSV (header row then `symbol[,current,opening]` rows).

    Returns (symbols, stock_info) where stock_info maps each symbol to
    [current_price, opening_price] (as strings), or to [] when the row
    only carries the symbol (prices not fetched yet).

    At most max_stocks rows are read; extra rows are skipped with a
    warning. Uses a context manager so the file is closed even if a row
    raises (the original leaked the handle on exceptions).
    """
    symbols = []
    stock_info = {}
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        for i, row in enumerate(reader):
            if i >= max_stocks:
                print('max stocks exceeded')
                break
            try:
                symbol, current_price, opening_price = row
                stock_info[symbol] = [current_price, opening_price]
            except ValueError:
                # symbol-only row: no price data recorded yet
                symbol = row[0]
                stock_info[symbol] = []
            symbols.append(symbol)
    return symbols, stock_info
2021-05-24 19:59:42 +00:00
def readCryptoCSV(file_path, max_crypto):
    """Read the crypto CSV (header then `symbol,name,base[,current,24hr]`).

    Returns (names, stock_info, unique_bases):
      names        -- coin names in file order
      stock_info   -- name -> [symbol, base, current, change] or
                      [symbol, base] when the row has no price data yet
      unique_bases -- de-duplicated list of quote currencies, in order

    BUG FIX: the loop previously tested the undefined global `max_stocks`
    instead of the `max_crypto` parameter (NameError at runtime), and the
    trailing try/else made the loop stop after the first complete row.
    """
    names = []
    stock_info = {}
    unique_bases = []
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip the header row
        for i, row in enumerate(reader):
            print(row)
            if i >= max_crypto:
                print('max stocks exceeded')
                break
            try:
                symbol, name, base, current_price, opening_price = row
                stock_info[name] = [symbol, base, current_price, opening_price]
            except ValueError:
                # symbol/name/base only: no price data recorded yet
                symbol, name, base = row
                stock_info[name] = [symbol, base]
            if base not in unique_bases:
                unique_bases.append(base)
            names.append(name)
    return names, stock_info, unique_bases
2021-05-06 19:59:27 +00:00
def emptyInfo(symbols, stock_info):
    """Return True if any symbol is missing its [current, opening] prices.

    readCSV stores [] for symbol-only rows and a 2-element list otherwise,
    so "missing" means fewer than 2 entries. BUG FIX: the original tested
    `len(...) == 1`, which matched neither case and so never reported an
    empty stock.
    """
    for symbol in symbols:
        if len(stock_info[symbol]) < 2:  # stock with no price info yet
            return True
    return False
def updateUpdate(NY_time):
    """Persist NY_time (as dd/mm/YYYY HH:MM:SS) to csv/last_update.csv so
    the next run knows when data was last refreshed."""
    stamp = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    with open('csv/last_update.csv', 'w+') as out:
        out.write(stamp + '\n')
2021-05-05 19:26:56 +00:00
2021-06-27 11:07:47 +00:00
def updateStockPrices():
    """Fetch a quote for every ticker via Finnhub and rewrite
    csv/tickers.csv as `name,current,opening` rows.

    Any failure is appended to the module-level log file `logf` (with file,
    line, type and full traceback) and printed, but not re-raised.
    """
    max_stocks = 200
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g" #Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80" #Finnhub
    finnhubClient = finnhub.Client(api_key=finnhubAPIkey)

    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        quotes = [finnhubClient.quote(symbol) for symbol in symbols]
        current_prices = [q['c'] for q in quotes]
        opening_prices = [q['o'] for q in quotes]

        with open('csv/tickers.csv', 'w+') as out:
            out.write('name,current,opening\n')
            for symbol, cur, opn in zip(symbols, current_prices, opening_prices):
                out.write(symbol + ',' + str(cur) + ',' + str(opn) + '\n')
        print('API called successfully')
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-05-08 11:10:05 +00:00
2021-06-27 11:07:47 +00:00
def updateStockPricesIEX():
    """Fetch intraday prices for every ticker from IEX Cloud and rewrite
    csv/tickers.csv as `name,current,opening` rows.

    The first intraday bar supplies the opening price and the last bar the
    current price. Failures are logged to the module-level `logf` and then
    re-raised. CLEANUP: removed large dead commented-out request code and
    an unreachable `sys.exit()` that followed `raise e`; the JSON body is
    now parsed once per symbol instead of twice, and the loop-invariant
    URL parts are hoisted out of the loop.
    """
    iexAPIkey = 'pk_68ef6a15902c41f887f0b544a0ca17cf' #IEX
    iexSandboxAPIkey = 'Tpk_0078dff413ef4f979137f7111452dc4b'
    max_stocks = 200
    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        current_prices = []
        opening_prices = []
        host = 'https://cloud.iexapis.com/stable'
        querystring = '?chartIEXOnly=true&token='+iexAPIkey
        for symbol in symbols:
            print(symbol)
            intradayEndpoint = '/stock/'+ symbol+ '/intraday-prices'
            intraday_request_url = host + intradayEndpoint + querystring
            print('\nBEGIN REQUEST++++++++++++++++++++++++++++++++++++')
            print('Request URL = ' + intraday_request_url)
            intraday_response = requests.get(intraday_request_url)
            print('\nRESPONSE++++++++++++++++++++++++++++++++++++')
            print('Response code: \n',intraday_response)
            print(intraday_response.text)
            bars = intraday_response.json()  # parse once, reuse below
            opening_prices.append(bars[0]['open'])
            current_prices.append(bars[-1]['close'])

        with open('csv/tickers.csv', 'w+') as out:
            out.write('name,current,opening\n')
            for i, symbol in enumerate(symbols):
                out.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        # propagate after logging (the original also had an unreachable
        # sys.exit() after this raise)
        raise
2021-05-08 11:10:05 +00:00
2021-05-14 12:02:22 +00:00
2021-06-27 11:07:47 +00:00
def updateCrypto(max_crypto=100):
    """Refresh csv/crypto.csv with current prices and 24h change from
    CoinGecko.

    max_crypto: maximum number of coins read from the CSV. BUG FIX: this
    was previously read from a module-level global defined only in the
    __main__ block, so calling the function from an import raised
    NameError; the default matches the old global's value.

    Failures are logged to the module-level `logf` and printed.
    """
    coingecko_client = CoinGeckoAPI()
    coins, coin_info, unique_bases = readCryptoCSV('csv/crypto.csv', max_crypto)
    try:
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies = unique_bases, include_24hr_change=True)
        with open('csv/crypto.csv', 'w+') as out:
            out.write('symbol,name,base,current,24hr change\n')
            for coin in coins:
                info = coin_info[coin]
                print(info)
                out.write(info[0] + ',' + coin + ',' + info[1] + ',' +str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-05-21 13:24:37 +00:00
def updateNews():
    """Refresh csv/news.csv with top headlines from the module-level
    `newsapi` client.

    Tries the per-category queries in csv/news_settings.json, keeping at
    most max_per_cat articles per category; on any failure (missing file,
    bad settings) falls back to the global top headlines. Commas in titles
    are replaced with '^' so the CSV columns stay aligned.

    BUG FIX: the settings path previously applied len()/slicing to the
    raw response dict and later indexed the resulting list with
    ['articles'], which raised TypeError; it now collects the articles
    lists themselves.
    """
    max_per_cat = 10
    try:
        try:
            # load user settings and fetch capped headlines per category
            articles = []
            settings = json.load(open('csv/news_settings.json', 'r'))
            for setting in settings:
                response = newsapi.get_top_headlines(**setting)
                articles.extend(response['articles'][:max_per_cat])
        except Exception:
            # if no settings just get top headlines
            articles = newsapi.get_top_headlines()['articles']

        headline_titles = [a['title'] for a in articles]
        headline_sources = [a['source']['name'] for a in articles]
        headline_times = [a['publishedAt'] for a in articles]

        with open('csv/news.csv', 'w+') as out:
            out.write('headline,source,date,time\n')
            for i, title in enumerate(headline_titles):
                # publishedAt is ISO-8601, e.g. 2021-06-30T12:34:56Z
                date, clock = headline_times[i].split('T')
                out.write(title.replace(',', '^') + ',' + headline_sources[i] + ',' + date + ',' + clock + '\n')
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-05-21 13:24:37 +00:00
2021-06-27 11:07:47 +00:00
def updateWeather():
    """Refresh current and daily weather JSON for every location listed on
    the first line of csv/weather_location.txt (comma-separated).

    Geocodes each location via GeoNames, queries the OpenWeatherMap
    "onecall" endpoint, and writes csv/current_weather.json and
    csv/daily_weather.json.

    BUG FIX: the loop accumulated per-city results in current_weathers /
    daily_weathers but then dumped only the LAST city's dicts; the full
    lists are now written. NOTE(review): this makes both files JSON lists
    (one entry per location) — confirm the display code indexes them.
    """
    max_cities = 30  # NOTE(review): currently unused cap on location count
    api_key = 'bd5d5096a5ba30bbcfb57ead42ab3fee'
    try:
        gn = geocoders.GeoNames(username='fintic')
        with open( "csv/weather_location.txt", 'r' ) as f:
            locations = next(f).split(',')

        current_weathers = []
        daily_weathers = []
        for location in locations:
            loc = gn.geocode(location)
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(loc.latitude, loc.longitude, api_key)
            data = requests.get(url).json()  # parse the body once
            weather = data['current']
            current_weather = {
                'main_weather': weather['weather'][0]['main'],
                'description': weather['weather'][0]['description'],
                'temp': weather['temp'],
                'min_temp': data['daily'][0]['temp']['min'],
                'max_temp': data['daily'][0]['temp']['max'],
                'feels_like': weather['feels_like'],
                'humidity': weather['humidity'],
                'clouds': weather['clouds'],
                'wind_speed': weather['wind_speed'],
                'wind_direction': weather['wind_deg'],
                'visibility': weather['visibility'],
                'uv': weather['uvi'],
                'rain_chance': data['hourly'][0]['pop'],
            }
            current_weathers.append(current_weather)

            daily_weather = []
            for day in data['daily']:
                daily_weather.append({
                    'main_weather': day['weather'][0]['main'],
                    'description': day['weather'][0]['description'],
                    'min_temp': day['temp']['min'],
                    'max_temp': day['temp']['max'],
                })
            daily_weathers.append(daily_weather)

        with open( "csv/current_weather.json", 'w+' ) as out:
            json.dump(current_weathers, out)
        with open( "csv/daily_weather.json", 'w+' ) as out:
            json.dump(daily_weathers, out)

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-06-09 18:06:21 +00:00
2021-06-27 11:07:47 +00:00
def updateForex():
    """Fetch USD-based FX rates for yesterday and today from
    api.frankfurter.app and dump `[base, {currency: [today, yesterday]}]`
    to csv/currency.json.

    CLEANUP: removed an unused `change` local and stopped shadowing the
    `yesterday` datetime with a rate float; the dump file handle is now
    closed deterministically. NOTE(review): `api_key` is never sent —
    frankfurter.app requires no key; kept for reference.
    """
    api_key = '862dbb6d1101ce0c5136'
    try:
        base = 'USD'
        yesterday = datetime.now() - timedelta(1)
        str_tod = datetime.strftime(datetime.now(), '%Y-%m-%d')
        str_yest = datetime.strftime(yesterday, '%Y-%m-%d')
        url = 'https://api.frankfurter.app/{}..{}?from={}'.format(str_yest, str_tod, base)
        all_data = requests.get(url).json()
        currencies = ['AUD', 'CAD', 'CHF', 'EUR', 'GBP', 'JPY', 'NZD']
        c_dict = {}
        for curr in currencies:
            # raises KeyError (caught below) when a date has no rates yet,
            # e.g. weekends/holidays
            today_rate = all_data['rates'][str_tod][curr]
            yest_rate = all_data['rates'][str_yest][curr]
            c_dict[curr] = [today_rate, yest_rate]
        with open( "csv/currency.json", 'w+' ) as out:
            json.dump([base, c_dict], out)
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-06-09 18:06:21 +00:00
2021-06-17 19:06:23 +00:00
def updateLeagueTable(api_key, league_id):
    """Fetch the 2020-2021 standings for a league from TheSportsDB and dump
    per-team stats to csv/sports/<league>/team_stats.json.

    Known league ids: '4328' premier_league, '4380' NHL, '4387' NBA,
    '4391' NFL. BUG FIX: an unknown league_id previously left `league`
    unbound and failed with NameError only after the network round-trip;
    the id->name mapping now raises an explicit KeyError (still caught and
    logged below). Also replaced range(len(...)) with direct iteration.
    """
    try:
        url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)
        all_data = requests.get(url).json()
        premier_teams = []
        for entry in all_data['table']:
            if entry['strTeam'] == "Calgary Flames":  # debug trace
                print(entry['dateUpdated'], entry['intPoints'])
            premier_teams.append({
                'name': entry['strTeam'],
                'wins': entry['intWin'],
                'loss': entry['intLoss'],
                'draw': entry['intDraw'],
                'played': entry['intPlayed'],
                'standing': entry['intRank'],
                'points': entry['intPoints'],
            })
        league = {'4328': 'premier_league', '4380': 'NHL', '4387': 'NBA', '4391': 'NFL'}[league_id]
        with open( "csv/sports/{}/team_stats.json".format(league), 'w+' ) as out:
            json.dump(premier_teams, out)
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-06-17 19:06:23 +00:00
def updateLeagueEvents(api_key, league_id, time):
    """Fetch a league's events from TheSportsDB and dump them to
    csv/sports/<league>/<time>_games.json.

    api_key: TheSportsDB API key.
    league_id: '4328' premier_league, '4380' NHL, '4387' NBA, '4391' NFL.
        Any other id leaves `league` unbound; the resulting NameError is
        caught and logged by the handler below.
    time: 'past', 'future' or 'live' -- selects both the endpoint and which
        per-event fields are recorded.
    """
    try:
        if time == 'past':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id) #last 15 events on the league (premium only)
        elif time == 'future':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id) #next 15 events on the league (premium only)
        elif time == 'live':
            url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)
        r = requests.get(url)
        all_data = r.json()
        print()
        #print(all_data['events'][0].keys())
        #print([all_data['events'][i]['strTimestamp'] for i in range(len(all_data['events']))])
        events = []
        # 'events' is None (JSON null) when the API has no fixtures,
        # e.g. no games currently live
        if not all_data['events'] is None:
            for i in range(len(all_data['events'])):
                event = {}
                event['date'] = all_data['events'][i]['dateEvent']
                if time == 'live':
                    event['time'] = all_data['events'][i]['strEventTime']
                    # NOTE(review): key is spelled 'progess' (sic); downstream
                    # readers may rely on the typo, so it is left unchanged
                    event['progess'] = all_data['events'][i]['strProgress']
                    event['status'] = all_data['events'][i]['strStatus']
                else:
                    event['time'] = all_data['events'][i]['strTime']
                    event['round'] = all_data['events'][i]['intRound']
                event['home_team'] = all_data['events'][i]['strHomeTeam']
                event['home_score'] = all_data['events'][i]['intHomeScore']
                event['away_team'] = all_data['events'][i]['strAwayTeam']
                event['away_score'] = all_data['events'][i]['intAwayScore']
                events.append(event)
        # map the known league ids onto their output directory names
        if league_id == '4328':
            league = 'premier_league'
        elif league_id == '4380':
            league = 'NHL'
        elif league_id == '4387':
            league = 'NBA'
        elif league_id == '4391':
            league = 'NFL'
        json.dump(events, open( "csv/sports/{}/{}_games.json".format(league, time), 'w+' ))
    except Exception as e:
        # log full details to the module-level log file, then continue;
        # errors here are non-fatal to the polling loop
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)
2021-06-19 09:53:09 +00:00
2021-06-27 11:07:47 +00:00
def updateSports():
    """Refresh the sports JSON files.

    Currently fetches live/past/future games for the NHL and the league
    table for the Premier League. TODO: read user settings to decide which
    sports to update (NBA/NFL ids are kept below for that purpose).

    CLEANUP: removed dead trailing code — a no-op bare string expression
    and an unused livescore `url` assignment left over from
    experimentation.
    """
    api_key = '97436974'

    prem_id = '4328'
    NHL_id = '4380'
    NBA_id = '4387'
    NFL_id = '4391'

    for league in [NHL_id]:
        updateLeagueEvents(api_key, league, 'live')
        updateLeagueEvents(api_key, league, 'past')
        updateLeagueEvents(api_key, league, 'future')

    updateLeagueTable(api_key, prem_id)
2021-06-17 19:06:23 +00:00
2021-06-14 19:36:17 +00:00
2021-06-27 11:07:47 +00:00
def checkStocks(last_update, update_frequency):
    """Refresh stock prices when needed; return True if a refresh ran.

    last_update: naive datetime (NY time) of the previous stock refresh.
    update_frequency: minimum minutes between refreshes during market hours.

    During NY market hours on weekdays, refreshes when the interval has
    elapsed or when any ticker is missing price data; outside market hours,
    refreshes once if the last update predates the previous day's close.

    NOTE(review): relies on the module-level globals NY_zone and max_stocks
    defined in the __main__ block.
    """
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0)
    closing = NY_time.replace(hour=16, minute=0, second=0, microsecond=0)

    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)

    updated = False
    diff = (NY_time - last_update).total_seconds()/60 #minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5:
        # market is open: real-time updating
        if diff >= update_frequency:
            updated = True
            updateStockPrices()
        elif emptyInfo(symbols, stock_info):  # any stocks missing price data
            updated = True
            updateStockPrices()
    else:
        # market closed: update if the last update was before the previous
        # day's close (dropped a redundant strftime/strptime round-trip --
        # closing already has second and microsecond zeroed)
        yday_closing = closing - dt.timedelta(days=1)
        if last_update < yday_closing:
            updated = True
            # BUG FIX: was updateStockPrices(symbols), but the function
            # takes no arguments and this path raised TypeError
            updateStockPrices()
    return updated
if __name__ == '__main__':

    # shared error log; every update* function appends exceptions here
    logf = open("log.txt", "w")
    t = time.time()
    '''
    updateStockPrices()
    print('finnhub:',time.time() -t)
    t = time.time()
    updateStockPricesIEX()
    print('iex:', time.time() -t)
    sys.exit()
    '''

    # caps on how many rows are read from the ticker/crypto CSVs
    max_stocks = 200
    max_crypto = 100

    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')

    # minimum minutes between refreshes of each data source
    update_frequencies = {'stocks':2, 'crypto':10, 'news':120, 'weather': 10} #minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')

    NY_time = datetime.now(NY_zone)
    CET_time = datetime.now(CET_zone)

    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    # NOTE(review): formats NY_time, not CET_time -- looks like a bug, but
    # CET_str is only referenced by the commented-out seeding code below
    CET_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")

    # one-off seeding of csv/last_updates.json, kept for reference:
    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()

    # load the per-source timestamps of the last successful update
    f = open('csv/last_updates.json', 'r')
    last_updates = json.load(f)
    f.close()
    t = time.time()

    try:
        # main polling loop: check each source's elapsed time and refresh
        # it when its frequency has passed
        while True:
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            # non-blocking stdin poll; value currently unused
            msg = getInput()
            #stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            if checkStocks(stock_time, stock_frequency):
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks'] = stock_time

            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")
            crypto_frequency = update_frequencies['crypto']
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['crypto']:
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto()
                last_updates['crypto'] = crypto_time

            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")
            weather_frequency = update_frequencies['weather']
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['weather']:
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather()
                last_updates['weather'] = weather_time

            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")
            news_frequency = update_frequencies['news']
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['news']:
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews()
                last_updates['news'] = news_time

            #forex updates once every 24hours at 1700 CET
            # update if last update was before the previous days closing
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
            if forex_time < yday_update:
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex'] = forex_time
                updateForex()

            # NOTE(review): last_updates is never written back to
            # csv/last_updates.json, so every source refreshes again on
            # restart -- confirm whether that is intended

    except Exception as e:
        # any uncaught error terminates the polling loop; log and print it
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        print(e)