678 lines
24 KiB
Python
678 lines
24 KiB
Python
# Copyright (C) 2020 Fintic, finticofficial@gmail.com
|
|
#
|
|
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
|
|
#
|
|
# This code can not be copied and/or distributed without the express
|
|
# permission of Fintic
|
|
|
|
import finnhub
|
|
import time
|
|
import csv
|
|
import pytz
|
|
from datetime import datetime, timedelta
|
|
import json
|
|
import datetime as dt
|
|
import sys, os, base64, hashlib, hmac, select
|
|
import requests
|
|
from pycoingecko import CoinGeckoAPI
|
|
from newsapi import NewsApiClient
|
|
import traceback
|
|
from geopy import geocoders
|
|
|
|
def getInput(Block=False):
    """Return one character from stdin, or '' when nothing is waiting.

    With Block=True the read always happens (and may block); otherwise
    stdin is polled with a zero-timeout select() first.
    """
    if not Block:
        # Zero-timeout poll: is there a character ready on stdin?
        ready, _, _ = select.select([sys.stdin], [], [], 0)
        if not ready:
            return ''
    return sys.stdin.read(1)
|
|
|
|
def readCSV(file_path, max_stocks):
    """Read a ticker CSV (header row skipped) into symbol order + info dict.

    Each data row is expected to be ``symbol,current_price,opening_price``.
    A row that does not have exactly three fields falls back to recording
    only the symbol (first field) with an empty info list. At most
    *max_stocks* data rows are read.

    Args:
        file_path: path to the CSV file.
        max_stocks: maximum number of data rows to read.

    Returns:
        (symbols, stock_info): symbols in file order, and a dict mapping
        symbol -> [current_price, opening_price] (or [] when the row had
        no prices). Values are strings exactly as read from the file.
    """
    symbols = []
    stock_info = {}
    # 'with' guarantees the handle is closed even if a row raises
    # (the original leaked the handle on exceptions).
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip header line
        for i, row in enumerate(reader):
            if i >= max_stocks:
                break
            try:
                symbol, current_price, opening_price = row
                stock_info[symbol] = [current_price, opening_price]
            except ValueError:
                # Row without price fields yet: keep just the symbol.
                symbol = row[0]
                stock_info[symbol] = []
            symbols.append(symbol)
    return symbols, stock_info
|
|
|
|
def readCryptoCSV(file_path, max_crypto):
    """Read the crypto CSV (header row skipped) into name order + info dict.

    Each data row is either ``symbol,name,base,current,24hr_change`` or,
    before prices have been fetched, just ``symbol,name,base``. At most
    *max_crypto* data rows are read.

    Args:
        file_path: path to the CSV file.
        max_crypto: maximum number of data rows to read.

    Returns:
        (names, stock_info, unique_bases): coin names in file order; a dict
        mapping name -> [symbol, base, current, change] (or [symbol, base]
        when no prices were present); and the distinct base currencies in
        order of first appearance. Values are strings as read from the file.
    """
    names = []
    stock_info = {}
    unique_bases = []
    # 'with' guarantees the handle is closed even if a row raises
    # (the original leaked the handle on exceptions).
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip header line
        for i, row in enumerate(reader):
            if i >= max_crypto:
                break
            try:
                symbol, name, base, current_price, opening_price = row
                stock_info[name] = [symbol, base, current_price, opening_price]
            except ValueError:
                # Row without price fields yet.
                symbol, name, base = row
                stock_info[name] = [symbol, base]
            if base not in unique_bases:
                unique_bases.append(base)
            names.append(name)
    return names, stock_info, unique_bases
|
|
|
|
def emptyInfo(symbols, stock_info):
    """Return True if any symbol is missing its price data.

    readCSV() records an empty list for a ticker row that had no price
    fields, so any entry with fewer than the two expected values
    (current, opening) means a full refresh is needed.

    Fix: the original tested ``len(...) == 1``, but readCSV stores ``[]``
    (length 0) for priceless tickers, so empty entries were never detected.
    """
    for symbol in symbols:
        if len(stock_info[symbol]) < 2:  # fewer than [current, opening]
            return True
    return False
|
|
|
|
def updateUpdate(NY_time):
    """Persist *NY_time* to csv/last_update.csv as dd/mm/YYYY HH:MM:SS."""
    stamp = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    with open('csv/last_update.csv', 'w+') as f:
        f.write(stamp + '\n')
|
|
|
|
def updateStockPricesFinhubb():
    """Refresh csv/tickers.csv with live quotes from the Finnhub API."""
    # NOTE(review): API keys are hard-coded in source; the sandbox key is unused.
    max_stocks = 200
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g" #Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80" #Finnhub
    finnhubClient = finnhub.Client(api_key=finnhubAPIkey)

    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        # One quote request per symbol; 'c' = current price, 'o' = day open.
        quotes = [finnhubClient.quote(symbol) for symbol in symbols]
        current_prices = [quote['c'] for quote in quotes]
        opening_prices = [quote['o'] for quote in quotes]

        # Rewrite the tickers file in place with the fresh prices.
        CSV = open('csv/tickers.csv', 'w+')
        CSV.write('name,current,opening\n')
        for i, symbol in enumerate(symbols):
            CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        CSV.close()

    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle
        # opened under __main__.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
|
|
def updateStockPrices():
    """Refresh csv/tickers.csv with intraday prices from the IEX Cloud API."""
    # NOTE(review): API key is hard-coded in source.
    iexAPIkey = 'pk_d066d39789bd41caac209bca850a35db' #IEX

    max_stocks = 200
    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        current_prices = []
        opening_prices = []
        for symbol in symbols:
            method = 'GET'  # unused
            host = 'https://cloud.iexapis.com/stable'

            intradayEndpoint = '/stock/'+ symbol+ '/intraday-prices'
            querystring = '?chartIEXOnly=true&token='+iexAPIkey

            intraday_request_url = host + intradayEndpoint + querystring

            intraday_response = requests.get(intraday_request_url)

            # Forward scan: first non-null 'open' of the day.
            for i in range(len(intraday_response.json())):
                opn = intraday_response.json()[i]['open']
                if opn is not None:
                    break
            # Backward scan: last non-null 'close' of the day.
            # NOTE(review): this range stops before index 0, and if the
            # response is empty 'opn'/'current' are never bound — the
            # resulting NameError is swallowed by the except below.
            for i in range(len(intraday_response.json())-1, 0, -1):
                current = intraday_response.json()[i]['close']
                if current is not None:
                    break

            opening_prices.append(opn)
            current_prices.append(current)

        # Rewrite the tickers file in place with the refreshed prices.
        CSV = open('csv/tickers.csv', 'w+')
        CSV.write('name,current,opening\n')
        for i, symbol in enumerate(symbols):
            CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        CSV.close()

    except Exception as e:
        # Log failure details to the module-level log file.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
|
|
|
|
def updateCrypto(max_crypto=100):
    """Refresh csv/crypto.csv with prices from the CoinGecko API.

    Args:
        max_crypto: maximum number of coins to read from the CSV. Defaults
            to 100 — the value the __main__ block sets. (Fix: the original
            read a ``max_crypto`` global defined only when run as a script,
            so calling this from an import raised NameError.)
    """
    coingecko_client = CoinGeckoAPI()

    coins, coin_info, unique_bases = readCryptoCSV('csv/crypto.csv', max_crypto)
    try:
        # One batch request covering every coin in all required bases.
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies = unique_bases, include_24hr_change=True)
        CSV = open('csv/crypto.csv', 'w+')
        CSV.write('symbol,name,base,current,24hr change\n')

        for coin in coins:
            info = coin_info[coin]  # [symbol, base, ...]
            CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' +str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
        CSV.close()
    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateNews():
    """Refresh csv/news.csv with top headlines from NewsAPI.

    Headlines come either from the per-category queries listed in
    csv/news_settings.json or, if that file is missing/unreadable, from
    the unfiltered top headlines. 'newsapi' is a module-level client
    created under __main__.
    """
    max_per_cat = 10
    try:
        try:
            #load user settings
            headlines = []
            settings = json.load(open('csv/news_settings.json', 'r'))

            for setting in settings:
                h = newsapi.get_top_headlines(**setting)
                if len(h) > max_per_cat:
                    h = h[0:max_per_cat]
                headlines.append(h)
        except:
            #if no settings just get top headlines
            headlines = newsapi.get_top_headlines()

        # NOTE(review): when the settings branch succeeds, 'headlines' is a
        # *list* of responses, so headlines['articles'] below raises and the
        # function only logs. Presumably the settings branch should merge
        # each response's 'articles' list instead — confirm intent.
        headline_titles = [headline['title'] for headline in headlines['articles']]
        headline_sources = [headline['source']['name'] for headline in headlines['articles']]
        headline_times = [headline['publishedAt']for headline in headlines['articles']]

        CSV = open('csv/news.csv', 'w+')
        CSV.write('headline,source,date,time\n')

        for i, title in enumerate(headline_titles):
            date, time = headline_times[i].split('T')
            # Commas inside a headline would break the CSV, so swap for '^'.
            CSV.write(title.replace(',', '^') + ',' + headline_sources[i] + ',' + date + ',' + time + '\n')

        CSV.close()
    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateWeather():
    """Refresh csv/current_weather.json and csv/daily_weather.json.

    Reads comma-separated place names from the single line of
    csv/weather_location.txt, geocodes each via GeoNames, then queries the
    OpenWeatherMap one-call API for current and daily conditions.
    """
    max_cities = 30  # NOTE(review): declared but never enforced below
    # NOTE(review): API key and GeoNames username are hard-coded in source.
    api_key = 'a9476947fa1a2f712076453bec4a0df5'
    try:
        gn = geocoders.GeoNames(username='fintic')

        f = open( "csv/weather_location.txt", 'r' )
        line = next(f)
        locations = line.split(',')
        f.close()

        current_weathers = []   # one dict per location
        daily_weathers = []     # one list-of-day-dicts per location

        for location in locations:
            loc = gn.geocode(location)  # place name -> lat/lon

            current_weather = {}

            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)

            weather = r.json()['current']

            # Flatten the displayed fields into a simple dict.
            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            # Today's min/max come from the first entry of the daily forecast.
            current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
            current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            # Probability of precipitation for the current hour.
            current_weather['rain_chance'] = r.json()['hourly'][0]['pop']

            current_weathers.append(current_weather)

            daily_weather = []
            daily = r.json()['daily']

            # One summary dict per forecast day.
            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)

            daily_weathers.append(daily_weather)

        json.dump( current_weathers, open( "csv/current_weather.json", 'w+' ))
        json.dump( daily_weathers, open( "csv/daily_weather.json", 'w+' ))

    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateForex():
    """Fetch USD exchange rates for today and yesterday from the
    Frankfurter API and store csv/currency.json as
    [base, {currency: [today_rate, yesterday_rate]}]."""
    try:
        base = 'USD'
        yest_str = datetime.strftime(datetime.now() - timedelta(1), '%Y-%m-%d')
        today_str = datetime.strftime(datetime.now(), '%Y-%m-%d')

        # Frankfurter returns a date-indexed table of rates over the span.
        request_url = 'https://api.frankfurter.app/{}..{}?from={}'.format(yest_str, today_str, base)
        all_data = requests.get(request_url).json()

        c_dict = {}
        for curr in ['AUD', 'CAD', 'CHF', 'EUR', 'GBP', 'JPY', 'NZD']:
            today_rate = all_data['rates'][today_str][curr]
            yest_rate = all_data['rates'][yest_str][curr]
            change = today_rate - yest_rate  # computed but not stored downstream
            c_dict[curr] = [today_rate, yest_rate]

        json.dump([base, c_dict], open( "csv/currency.json", 'w+' ))
    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateLeagueTable(api_key, league_id):
    """Fetch the 2020-2021 standings for a league from TheSportsDB and
    write them to csv/sports/<league>/team_stats.json.

    Args:
        api_key: TheSportsDB API key.
        league_id: TheSportsDB numeric league id, as a string
            (e.g. '4328' = Premier League).
    """
    # Supported league ids -> folder name under csv/sports/.
    league_names = {'4328': 'premier_league', '4380': 'NHL', '4387': 'NBA', '4391': 'NFL'}
    try:
        url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

        r = requests.get(url)
        all_data = r.json()

        premier_teams = []
        for entry in all_data['table']:
            team = {}
            team['name'] = entry['strTeam']
            team['wins'] = entry['intWin']
            team['loss'] = entry['intLoss']
            team['draw'] = entry['intDraw']
            team['played'] = entry['intPlayed']
            team['standing'] = entry['intRank']
            team['points'] = entry['intPoints']
            premier_teams.append(team)

        # Unknown ids raise KeyError, logged below. (Fix: the original's
        # if/elif chain left 'league' unbound for unknown ids, producing a
        # NameError with no hint of the bad id.)
        league = league_names[league_id]
        json.dump(premier_teams, open( "csv/sports/{}/team_stats.json".format(league), 'w+' ))
    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateLeagueEvents(api_key, league_id, time):
    """Fetch a league's events from TheSportsDB and write them to
    csv/sports/<league>/<time>_games.json.

    Args:
        api_key: TheSportsDB API key.
        league_id: TheSportsDB numeric league id, as a string.
        time: one of 'past', 'future', or 'live'.
    """
    # Supported league ids -> folder name under csv/sports/.
    league_names = {'4328': 'premier_league', '4380': 'NHL', '4387': 'NBA', '4391': 'NFL'}
    try:
        if time == 'past':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id) #last 15 events on the league (premium only)
        elif time == 'future':
            url ='https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id) #next 15 events on the league (premium only)
        elif time == 'live':
            url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)
        else:
            # Fix: the original fell through with 'url' unbound (NameError);
            # fail with an explicit message instead (still caught and logged).
            raise ValueError("time must be 'past', 'future' or 'live': " + repr(time))

        r = requests.get(url)
        all_data = r.json()

        events = []
        if not all_data['events'] is None:
            for item in all_data['events']:
                event = {}
                event['date'] = item['dateEvent']

                if time == 'live':
                    event['time'] = item['strEventTime']
                    # 'progess' key kept as-is — downstream readers may rely on it.
                    event['progess'] = item['strProgress']
                    event['status'] = item['strStatus']
                else:
                    event['time'] = item['strTime']
                event['round'] = item['intRound']
                event['home_team'] = item['strHomeTeam']
                event['home_score'] = item['intHomeScore']
                event['away_team'] = item['strAwayTeam']
                event['away_score'] = item['intAwayScore']

                events.append(event)

        # Unknown ids raise KeyError, logged below (the original left
        # 'league' unbound and raised NameError in the same case).
        league = league_names[league_id]
        json.dump(events, open( "csv/sports/{}/{}_games.json".format(league, time), 'w+' ))
    except Exception as e:
        # Log failure details; 'logf' is the module-level log handle.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
|
|
|
|
|
|
def updateSports():
    """Refresh sports JSON files for the configured leagues.

    Currently updates live/past/future games for the NHL and the Premier
    League, plus the Premier League standings table.
    """
    # TODO(review): read user settings to decide which sports to update.
    api_key = '97436974'

    prem_id = '4328'  # Premier League
    NHL_id = '4380'
    NBA_id = '4387'   # currently unused
    NFL_id = '4391'   # currently unused

    for league_id in [NHL_id, prem_id]:
        updateLeagueEvents(api_key, league_id, 'live')
        updateLeagueEvents(api_key, league_id, 'past')
        updateLeagueEvents(api_key, league_id, 'future')

    updateLeagueTable(api_key, prem_id)

    # Fix: removed dead code — a no-op string expression and an unused
    # 'url' assignment. Endpoints of interest kept for reference:
    #   eventsnext.php?id=<team_id>             next five events by team id (premium)
    #   eventsseason.php?id=4328&s=2020-2021    all past premier league events
    #   /api/v2/json/<key>/livescore.php?l=4380 live scores
|
|
|
|
|
|
def checkStocks(last_update, update_frequency):
    """Refresh stock prices when they are stale.

    During NYSE hours (09:30-16:00 New York time, Mon-Fri) prices refresh
    every *update_frequency* minutes, or immediately if any ticker lacks
    price data. Outside market hours a single refresh happens only if the
    last update predates the previous day's close.

    Relies on module globals NY_zone and max_stocks (set under __main__).

    Args:
        last_update: naive datetime of the previous stock update (NY time).
        update_frequency: minimum minutes between intraday refreshes.

    Returns:
        True if updateStockPrices() was invoked.
    """
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)

    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)

    updated = False

    diff = (NY_time - last_update).total_seconds()/60 #minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5: # we need to do real time updating
        if diff >= update_frequency:
            updated = True
            updateStockPrices()
        elif emptyInfo(symbols, stock_info): # if theres any empty stocks
            updated = True
            updateStockPrices()
    else:
        # update if last update was before the previous days closing
        yday_closing = closing - dt.timedelta(days=1)
        # Round-trip through strftime/strptime drops sub-second precision
        # before the comparison below.
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")

        if last_update < yday_closing:
            updated = True
            updateStockPrices()

    return updated
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Shared log file; every update function's error handler writes here.
    logf = open("log.txt", "w")

    t = time.time()

    # Display limits for the ticker and crypto feeds.
    max_stocks = 200
    max_crypto = 100

    # NOTE(review): NewsAPI key is hard-coded in source.
    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')

    # Minimum minutes between refreshes of each data source.
    update_frequencies = {'stocks':2, 'crypto':10, 'news':120, 'weather': 120, 'sports': 120} #minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')

    NY_time = datetime.now(NY_zone)

    CET_time = datetime.now(CET_zone)

    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    # NOTE(review): formatted from NY_time — presumably should use CET_time
    # (harmless today since CET_str is unused).
    CET_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")

    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()

    # Load the persisted per-source last-update timestamps; on any failure
    # fall back to a fixed stale default so everything refreshes on first run.
    try:
        f = open('csv/last_updates.json', 'r')
        last_updates = json.load(f)
        f.close()

    except:
        last_updates = {"stocks": "27/06/2021 07:05:39", "crypto": "27/06/2021 07:05:39", "news": "27/06/2021 07:05:39", "weather": "27/06/2021 07:05:39", "forex": "27/06/2021 07:05:39", "sports": "27/06/2021 07:05:39"}

    t = time.time()

    try:
        # Main polling loop: each source refreshes when its elapsed time
        # exceeds its configured frequency.
        while True:

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)

            msg = getInput()  # NOTE(review): read but never used

            #stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            if checkStocks(stock_time, stock_frequency):
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks'] = stock_time

            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['crypto']:
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto()
                last_updates['crypto'] = crypto_time

            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['weather']:
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather()
                last_updates['weather'] = weather_time

            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['news']:
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews()
                last_updates['news'] = news_time

            # sports
            sports_time = datetime.strptime(last_updates['sports'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['sports']:
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateSports()
                last_updates['sports'] = sports_time

            #forex updates once every 24hours at 1700 CET

            # update if last update was before the previous days closing
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)

            if forex_time < yday_update:
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex'] = forex_time
                updateForex()

            # Persist the timestamps every loop iteration.
            json.dump(last_updates, open('csv/last_updates.json', 'w+'))

    except Exception as e:
        # Last-resort handler: log and exit the loop.
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))