import finnhub
import time
import csv
import pytz
from datetime import datetime, timedelta
import json
import datetime as dt
import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders


def logError(e):
    """Append the active exception (with file/line/type and full traceback) to the
    global log file ``logf`` and echo it to stdout.

    Must be called from inside an ``except`` block.  Replaces the identical
    logging boilerplate that was previously duplicated in every updater.
    """
    exc_type, exc_obj, exc_tb = sys.exc_info()
    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    logf.write(str(e))
    logf.write('. file: ' + fname)
    logf.write('. line: ' + str(exc_tb.tb_lineno))
    logf.write('. type: ' + str(exc_type))
    logf.write('\n ' + "".join(traceback.format_exception(exc_type, exc_obj, exc_tb)))
    print(e)


def getInput(Block=False):
    """Read one character from stdin.

    Non-blocking by default (returns '' when nothing is pending); pass
    Block=True to wait for input.
    """
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        msg = sys.stdin.read(1)
    else:
        msg = ''
    return msg


def readCSV(file_path, max_stocks):
    """Read a tickers CSV (header: name,current,opening).

    Returns (symbols, stock_info) where stock_info maps symbol ->
    [current, opening] or [] when the row carries no price data yet.
    At most max_stocks rows are read.
    """
    symbols = []
    stock_info = {}
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip header row
        for i, row in enumerate(reader):
            if i >= max_stocks:
                print('max stocks exceeded')
                break
            try:
                symbol, current_price, opening_price = row
                symbols.append(symbol)
                stock_info[symbol] = [current_price, opening_price]
            except ValueError:  # row has only the symbol, prices not fetched yet
                symbol = row[0]
                symbols.append(symbol)
                stock_info[symbol] = []
    return symbols, stock_info


def readCryptoCSV(file_path, max_crypto):
    """Read the crypto CSV (header: symbol,name,base,current,24hr change).

    Returns (names, stock_info, unique_bases) where stock_info maps
    coin name -> [symbol, base(, current, change)] and unique_bases is the
    de-duplicated list of quote currencies.  At most max_crypto rows are read.
    """
    names = []
    stock_info = {}
    unique_bases = []
    with open(file_path, 'r') as f:
        reader = csv.reader(f)
        next(reader)  # skip header row
        for i, row in enumerate(reader):
            print(row)
            if i >= max_crypto:
                print(i, max_crypto, 'max crypto exceeded')
                break
            try:
                symbol, name, base, current_price, opening_price = row
                names.append(name)
                stock_info[name] = [symbol, base, current_price, opening_price]
            except ValueError:  # row has no price data yet
                symbol, name, base = row
                names.append(name)
                stock_info[name] = [symbol, base]
            if base not in unique_bases:
                unique_bases.append(base)
    return names, stock_info, unique_bases


def emptyInfo(symbols, stock_info):
    """Return True if any symbol is missing price data and needs an update.

    A fully-populated entry is [current, opening] (len 2); anything shorter
    means the prices were never fetched.  (The previous ``== 1`` check could
    never match, since info-less stocks are stored as [].)
    """
    for symbol in symbols:
        if len(stock_info[symbol]) < 2:  # stock with no price info
            return True
    return False


def updateUpdate(NY_time):
    """Persist the given New York timestamp to csv/last_update.csv."""
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    with open('csv/last_update.csv', 'w+') as f:
        f.write(NY_str + '\n')


def updateStockPrices():
    """Fetch current and opening prices for every ticker via Finnhub and
    rewrite csv/tickers.csv in place."""
    max_stocks = 200
    # NOTE(review): API keys are hard-coded; consider moving them to an
    # environment variable or config file.
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g"  # Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80"  # Finnhub
    finnhubClient = finnhub.Client(api_key=finnhubAPIkey)
    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        quotes = [finnhubClient.quote(symbol) for symbol in symbols]
        current_prices = [quote['c'] for quote in quotes]
        opening_prices = [quote['o'] for quote in quotes]
        with open('csv/tickers.csv', 'w+') as CSV:
            CSV.write('name,current,opening\n')
            for i, symbol in enumerate(symbols):
                CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        print('API called successfully')
    except Exception as e:
        logError(e)


def updateStockPricesIEX():
    """Alternative price updater using the IEX Cloud intraday-prices endpoint.

    Rewrites csv/tickers.csv; exits the process on failure (preserved from
    the original behaviour).
    """
    iexAPIkey = 'pk_68ef6a15902c41f887f0b544a0ca17cf'  # IEX
    iexSandboxAPIkey = 'Tpk_0078dff413ef4f979137f7111452dc4b'
    max_stocks = 200
    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    try:
        current_prices = []
        opening_prices = []
        host = 'https://cloud.iexapis.com/stable'
        for symbol in symbols:
            print(symbol)
            intradayEndpoint = '/stock/' + symbol + '/intraday-prices'
            querystring = '?chartIEXOnly=true&token=' + iexAPIkey
            intraday_request_url = host + intradayEndpoint + querystring
            print('\nBEGIN REQUEST++++++++++++++++++++++++++++++++++++')
            print('Request URL = ' + intraday_request_url)
            intraday_response = requests.get(intraday_request_url)
            print('\nRESPONSE++++++++++++++++++++++++++++++++++++')
            print('Response code: \n', intraday_response)
            points = intraday_response.json()
            # first point of the session holds the open, last holds the
            # most recent close
            opening_prices.append(points[0]['open'])
            current_prices.append(points[-1]['close'])
        with open('csv/tickers.csv', 'w+') as CSV:
            CSV.write('name,current,opening\n')
            for i, symbol in enumerate(symbols):
                CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
    except Exception as e:
        logError(e)
        sys.exit()


def updateCrypto(max_crypto=100):
    """Fetch current prices and 24h change for every coin via CoinGecko and
    rewrite csv/crypto.csv.

    max_crypto caps the number of rows read (previously this relied on a
    global only defined under __main__, raising NameError when imported).
    """
    coingecko_client = CoinGeckoAPI()
    coins, coin_info, unique_bases = readCryptoCSV('csv/crypto.csv', max_crypto)
    try:
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies=unique_bases,
                                              include_24hr_change=True)
        print(response)
        with open('csv/crypto.csv', 'w+') as CSV:
            CSV.write('symbol,name,base,current,24hr change\n')
            for coin in coins:
                info = coin_info[coin]  # [symbol, base, ...]
                print(info)
                CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' +
                          str(response[coin][info[1]]) + ',' +
                          str(response[coin]['usd_24h_change']) + '\n')
    except Exception as e:
        logError(e)


def updateNews():
    """Fetch headlines (per-category user settings if present, otherwise the
    global top headlines) and rewrite csv/news.csv.

    Commas inside titles are replaced with '^' so the CSV stays parseable.
    """
    max_per_cat = 10
    try:
        articles = []
        try:
            # Load user settings: a list of kwargs dicts for get_top_headlines.
            with open('csv/news_settings.json', 'r') as fh:
                settings = json.load(fh)
            for setting in settings:
                response = newsapi.get_top_headlines(**setting)
                # Cap each category's contribution.  (Previously whole
                # response dicts were appended and then indexed as a dict,
                # which always raised.)
                articles.extend(response['articles'][:max_per_cat])
        except Exception:
            # No settings (or unreadable settings): fall back to top headlines.
            articles = newsapi.get_top_headlines()['articles']
        headline_titles = [article['title'] for article in articles]
        headline_sources = [article['source']['name'] for article in articles]
        headline_times = [article['publishedAt'] for article in articles]
        with open('csv/news.csv', 'w+') as CSV:
            CSV.write('headline,source,date,time\n')
            for i, title in enumerate(headline_titles):
                date, time_str = headline_times[i].split('T')
                CSV.write(title.replace(',', '^') + ',' + headline_sources[i] + ',' +
                          date + ',' + time_str + '\n')
    except Exception as e:
        logError(e)


def updateWeather():
    """Fetch current + daily forecasts for every configured location via
    OpenWeatherMap One Call and write them to
    csv/current_weather.json / csv/daily_weather.json.

    Writes the full per-location lists (previously only the last location's
    data was dumped because the loop variables were serialized instead of the
    accumulators).
    """
    max_cities = 30
    api_key = 'bd5d5096a5ba30bbcfb57ead42ab3fee'
    try:
        gn = geocoders.GeoNames(username='fintic')
        with open("csv/weather_location.txt", 'r') as f:
            line = next(f)
        locations = line.split(',')
        current_weathers = []
        daily_weathers = []
        for location in locations:
            loc = gn.geocode(location)
            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(
                lat, lon, api_key)
            data = requests.get(url).json()  # parse once, reuse below
            weather = data['current']
            current_weather = {}
            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = data['daily'][0]['temp']['min']
            current_weather['max_temp'] = data['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = data['hourly'][0]['pop']
            current_weathers.append(current_weather)
            daily_weather = []
            for day in data['daily']:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)
            daily_weathers.append(daily_weather)
        with open("csv/current_weather.json", 'w+') as fh:
            json.dump(current_weathers, fh)
        with open("csv/daily_weather.json", 'w+') as fh:
            json.dump(daily_weathers, fh)
    except Exception as e:
        logError(e)


def updateForex():
    """Fetch today's and yesterday's USD exchange rates from frankfurter.app
    and write [base, {currency: [today, yesterday]}] to csv/currency.json."""
    api_key = '862dbb6d1101ce0c5136'
    try:
        base = 'USD'
        str_tod = datetime.strftime(datetime.now(), '%Y-%m-%d')
        str_yest = datetime.strftime(datetime.now() - timedelta(1), '%Y-%m-%d')
        url = 'https://api.frankfurter.app/{}..{}?from={}'.format(str_yest, str_tod, base)
        all_data = requests.get(url).json()
        currencies = ['AUD', 'CAD', 'CHF', 'EUR', 'GBP', 'JPY', 'NZD']
        c_dict = {}
        for curr in currencies:
            # [today's rate, yesterday's rate]; consumers compute the change.
            current = all_data['rates'][str_tod][curr]
            previous = all_data['rates'][str_yest][curr]
            c_dict[curr] = [current, previous]
        with open("csv/currency.json", 'w+') as fh:
            json.dump([base, c_dict], fh)
    except Exception as e:
        logError(e)


def _leagueName(league_id):
    """Map a TheSportsDB league id to the directory name used under csv/sports/.

    Unknown ids fall back to the raw id rather than leaving the name unbound.
    """
    return {'4328': 'premier_league',
            '4380': 'NHL',
            '4387': 'NBA',
            '4391': 'NFL'}.get(league_id, league_id)


def updateLeagueTable(api_key, league_id):
    """Fetch the 2020-2021 league standings from TheSportsDB and write
    per-team stats to csv/sports/<league>/team_stats.json."""
    try:
        url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(
            api_key, league_id)
        all_data = requests.get(url).json()
        premier_teams = []
        for entry in all_data['table']:
            if entry['strTeam'] == "Calgary Flames":  # debug trace
                print(entry['dateUpdated'], entry['intPoints'])
            team = {}
            team['name'] = entry['strTeam']
            team['wins'] = entry['intWin']
            team['loss'] = entry['intLoss']
            team['draw'] = entry['intDraw']
            team['played'] = entry['intPlayed']
            team['standing'] = entry['intRank']
            team['points'] = entry['intPoints']
            premier_teams.append(team)
        league = _leagueName(league_id)
        with open("csv/sports/{}/team_stats.json".format(league), 'w+') as fh:
            json.dump(premier_teams, fh)
    except Exception as e:
        logError(e)


def updateLeagueEvents(api_key, league_id, time):
    """Fetch league events from TheSportsDB and write them to
    csv/sports/<league>/<time>_games.json.

    time is 'past', 'future', or 'live' and selects the endpoint queried.
    """
    try:
        if time == 'past':
            # last 15 events in the league (premium only)
            url = 'https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'.format(api_key, league_id)
        elif time == 'future':
            # next 15 events in the league (premium only)
            url = 'https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'.format(api_key, league_id)
        elif time == 'live':
            url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'.format(api_key, league_id)
        all_data = requests.get(url).json()
        print()
        print(all_data['events'])
        events = []
        if all_data['events'] is not None:
            for entry in all_data['events']:
                event = {}
                event['date'] = entry['dateEvent']
                if time == 'live':
                    event['time'] = entry['strEventTime']
                    # NOTE(review): 'progess' key kept as-is (typo) because
                    # downstream consumers may already read it.
                    event['progess'] = entry['strProgress']
                    event['status'] = entry['strStatus']
                else:
                    event['time'] = entry['strTime']
                    event['round'] = entry['intRound']
                event['home_team'] = entry['strHomeTeam']
                event['home_score'] = entry['intHomeScore']
                event['away_team'] = entry['strAwayTeam']
                event['away_score'] = entry['intAwayScore']
                events.append(event)
        league = _leagueName(league_id)
        with open("csv/sports/{}/{}_games.json".format(league, time), 'w+') as fh:
            json.dump(events, fh)
    except Exception as e:
        logError(e)


def updateSports():
    """Refresh live/past/future games for the configured leagues plus the
    Premier League table."""
    # TODO(review): read user settings to decide which sports to update.
    api_key = '97436974'
    prem_id = '4328'
    NHL_id = '4380'
    NBA_id = '4387'
    NFL_id = '4391'
    for league_id in [NHL_id, prem_id]:
        updateLeagueEvents(api_key, league_id, 'live')
        updateLeagueEvents(api_key, league_id, 'past')
        updateLeagueEvents(api_key, league_id, 'future')
    print('sports updated')
    updateLeagueTable(api_key, prem_id)


def checkStocks(last_update, update_frequency):
    """Decide whether stock prices need refreshing and refresh them.

    During NY market hours (weekdays 09:30-16:00) prices are refreshed every
    update_frequency minutes, or immediately if any ticker has no data yet.
    Outside market hours a single refresh happens if the last update predates
    the previous day's close.  Returns True when an update was performed.
    """
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)
    symbols, stock_info = readCSV('csv/tickers.csv', max_stocks)
    updated = False
    diff = (NY_time - last_update).total_seconds() / 60  # minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5:
        # Market open: real-time updating at the configured cadence.
        if diff >= update_frequency:
            updated = True
            updateStockPrices()
        elif emptyInfo(symbols, stock_info):  # any stocks missing price data
            updated = True
            updateStockPrices()
    else:
        # Market closed: update once if we haven't since yesterday's close.
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
        if last_update < yday_closing:
            updated = True
            # Fixed: updateStockPrices takes no arguments (was called with
            # `symbols`, which raised TypeError on this path).
            updateStockPrices()
    return updated


if __name__ == '__main__':
    logf = open("log.txt", "w")
    max_stocks = 200
    max_crypto = 100
    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')
    # Update cadences in minutes per data source.
    update_frequencies = {'stocks': 2, 'crypto': 10, 'news': 120, 'weather': 10, 'sports': 120}
    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')
    with open('csv/last_updates.json', 'r') as f:
        last_updates = json.load(f)
    try:
        while True:
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            msg = getInput()
            # stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            if checkStocks(stock_time, update_frequencies['stocks']):
                last_updates['stocks'] = NY_time.strftime("%d/%m/%Y %H:%M:%S")
            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            if (NY_time - crypto_time).total_seconds() / 60 >= update_frequencies['crypto']:
                last_updates['crypto'] = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto(max_crypto)
            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            if (NY_time - weather_time).total_seconds() / 60 >= update_frequencies['weather']:
                last_updates['weather'] = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather()
            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            if (NY_time - news_time).total_seconds() / 60 >= update_frequencies['news']:
                last_updates['news'] = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews()
            # sports
            sports_time = datetime.strptime(last_updates['sports'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            if (NY_time - sports_time).total_seconds() / 60 >= update_frequencies['sports']:
                last_updates['sports'] = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateSports()
            # forex updates once every 24 hours at 17:00 CET
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone)
            yday_update = (CET_time.replace(hour=17, minute=0, second=0, microsecond=0)
                           - dt.timedelta(days=1)).replace(tzinfo=None)
            if forex_time < yday_update:
                last_updates['forex'] = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                updateForex()
            with open('csv/last_updates.json', 'w+') as f:
                json.dump(last_updates, f)
            # Avoid burning a full CPU core between polling passes.
            time.sleep(1)
    except Exception as e:
        logError(e)