2021-07-31 09:41:33 +00:00
# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code can not be copied and/or distributed without the express
# permission of Fintic
2021-05-05 15:22:01 +00:00
import finnhub
import time
import csv
2021-05-05 19:26:56 +00:00
import pytz
2021-06-09 18:06:21 +00:00
from datetime import datetime , timedelta
2021-05-27 19:10:57 +00:00
import json
2021-05-06 19:59:27 +00:00
import datetime as dt
2021-05-27 19:10:57 +00:00
import sys , os , base64 , hashlib , hmac , select
2021-05-08 11:10:05 +00:00
import requests
2021-05-14 12:02:22 +00:00
from pycoingecko import CoinGeckoAPI
2021-05-21 13:24:37 +00:00
from newsapi import NewsApiClient
2021-06-28 19:36:29 +00:00
import traceback
from geopy import geocoders
2021-05-21 13:24:37 +00:00
2021-05-27 19:10:57 +00:00
def getInput(Block=False):
    """Read a single character from stdin.

    When Block is False (default), poll stdin without waiting and return
    ' ' (one space) if no character is pending; when Block is True, wait
    for input.
    """
    pending = select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], [])
    if Block or pending:
        return sys.stdin.read(1)
    #sys.stdin.flush()
    return ' '
2021-05-05 19:26:56 +00:00
2021-05-06 19:59:27 +00:00
def emptyInfo(symbols, stock_info):
    """Return True when at least one tracked symbol still carries the -1
    "no data yet" placeholder instead of real price info."""
    return any(stock_info[symbol] == -1 for symbol in symbols)
def updateUpdate(NY_time):
    """Persist the given New York timestamp to csv/last_update.csv
    in the day-first format used throughout this script."""
    stamp = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    with open('csv/last_update.csv', 'w+') as f:
        f.write(stamp + '\n')
2021-05-05 19:26:56 +00:00
2021-09-25 09:40:29 +00:00
def updateStocksFinhubb():
    """Legacy stock updater: pull quotes from Finnhub and write them to
    csv/tickers.csv as `name,current,opening` rows.

    NOTE(review): not called from the main loop (updateStocks() is used
    instead), and it calls readJSON(), which is not defined anywhere in
    this file — unless readJSON is provided elsewhere, that call raises
    NameError. Confirm before relying on this function.
    """
    max_stocks = 200
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g"  # Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80"  # Finnhub
    finnhubClient = finnhub.Client(api_key=finnhubAPIkey)

    # presumably returns (symbol list, per-symbol info) — TODO confirm readJSON source
    symbols, stock_info = readJSON('csv/tickers.csv', max_stocks)

    try:
        # Finnhub quote fields: 'c' = current price, 'o' = opening price
        quotes = [finnhubClient.quote(symbol) for symbol in symbols]
        current_prices = [quote['c'] for quote in quotes]
        opening_prices = [quote['o'] for quote in quotes]

        CSV = open('csv/tickers.csv', 'w+')
        CSV.write('name,current,opening\n')
        for i, symbol in enumerate(symbols):
            CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        CSV.close()

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-07-08 18:42:21 +00:00
2021-09-25 09:40:29 +00:00
def updateStocks():
    """Fetch the day's opening and most recent prices for every configured
    stock from the IEX intraday endpoint and rewrite
    csv/stocks_settings.json with the results.

    Errors are logged to the module-level `logf` and swallowed so the
    main loop keeps running.
    """
    iexAPIkey = 'pk_d066d39789bd41caac209bca850a35db'  # IEX

    max_stocks = 200  # NOTE(review): symbols are not actually truncated to this — confirm intended

    with open('csv/stocks_settings.json', 'r') as f:
        all_stocks_settings = json.load(f)
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())
    try:
        current_prices = []
        opening_prices = []
        host = 'https://cloud.iexapis.com/stable'
        querystring = '?chartIEXOnly=true&token=' + iexAPIkey
        for symbol in symbols:
            intraday_endpoint = '/stock/' + symbol + '/intraday-prices'
            intraday_request_url = host + intraday_endpoint + querystring

            intraday_response = requests.get(intraday_request_url)
            # parse the payload once instead of re-parsing it on every access
            minutes = intraday_response.json()

            # first minute of the day with a non-null open
            opn = next((m['open'] for m in minutes if m['open'] is not None), None)
            # latest minute with a non-null close (the original scan skipped
            # index 0 and left `current` unbound on an empty payload)
            current = next((m['close'] for m in reversed(minutes) if m['close'] is not None), None)

            opening_prices.append(opn)
            current_prices.append(current)

        stock_info = {}
        for i, symbol in enumerate(symbols):
            stock_info[symbol] = {'current': current_prices[i], 'opening': opening_prices[i]}

        all_stocks_settings['symbols'] = stock_info
        with open('csv/stocks_settings.json', 'w+') as f:
            json.dump(all_stocks_settings, f)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-07-05 18:51:40 +00:00
2021-05-14 12:02:22 +00:00
2021-06-27 11:07:47 +00:00
def updateCrypto():
    """Fetch current price and 24h change for every configured coin from
    CoinGecko and rewrite csv/crypto_settings.json.

    Settings keys look like 'SYM,BASE' (e.g. 'BTC,USD'). Errors are
    logged to the module-level `logf` and swallowed.
    """
    coingecko_client = CoinGeckoAPI()

    with open('csv/crypto_settings.json', 'r') as f:
        all_crypto_settings = json.load(f)
    coin_info = all_crypto_settings['symbols']
    symbol_base = list(coin_info.keys())
    symbols = [sb.split(',')[0] for sb in symbol_base]
    bases = [sb.split(',')[1] for sb in symbol_base]
    unique_bases = list(set(bases))

    # coingecko rate limited me from calling this too often
    #coin_list = coingecko_client.get_coins_list()
    #json.dump(coin_list, open('csv/coin_list.json', 'w+'))
    with open('csv/coin_list.json', 'r') as f:
        coin_list = json.load(f)

    # map each ticker symbol to a coingecko id; stop at the first match so
    # `coins` stays aligned one-to-one with `symbol_base` (the original kept
    # scanning and could append several ids for one symbol, misaligning the
    # lists). This linear scan may be slow as coin_list is large.
    coins = []
    for s in symbols:
        for c in coin_list:
            if c['symbol'].upper() == s and c['id'] != 'binance-peg-cardano':  # hackaround for two coins with symbol ada
                coins.append(c['id'])
                break

    crypto_info = {}
    try:
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies=unique_bases, include_24hr_change=True)

        for i, sb in enumerate(symbol_base):
            base = bases[i].lower()
            # CoinGecko names the change key per quote currency
            # ('usd_24h_change', 'eur_24h_change', ...); the original always
            # read 'usd_24h_change' and broke for non-USD bases.
            crypto_info[sb] = {'current': response[coins[i]][base],
                               '24hr_change': response[coins[i]][base + '_24h_change']}

        all_crypto_settings['symbols'] = crypto_info
        with open('csv/crypto_settings.json', 'w+') as f:
            json.dump(all_crypto_settings, f)
    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
def updateForex():
    """Fetch daily FX rates and day-over-day change from frankfurter.app
    and rewrite csv/forex_settings.json.

    Settings keys look like 'SYM,BASE'. Requests a three-day window so a
    fallback exists when today's fixing is not yet published. Errors are
    logged to the module-level `logf` and swallowed.
    """
    with open('csv/forex_settings.json', 'r') as f:
        all_forex_settings = json.load(f)
    forex_info = all_forex_settings['symbols']
    symbol_base = list(forex_info.keys())
    symbols = [sb.split(',')[0] for sb in symbol_base]
    bases = [sb.split(',')[1] for sb in symbol_base]
    unique_bases = list(set(bases))
    all_responses = []
    # get timeseries from two days ago until today in case it hasnt updated for today yet
    str_tod = datetime.strftime(datetime.now(), '%Y-%m-%d')
    str_yest = datetime.strftime(datetime.now() - timedelta(1), '%Y-%m-%d')
    str_yestyest = datetime.strftime(datetime.now() - timedelta(2), '%Y-%m-%d')
    try:
        for base in unique_bases:
            url = 'https://api.frankfurter.app/{}..{}?from={}'.format(str_yestyest, str_tod, base)
            r = requests.get(url)
            all_responses.append(r.json())

        c_dict = {}
        for i, curr in enumerate(symbols):
            for response in all_responses:
                if response['base'] == bases[i]:
                    try:
                        current = response['rates'][str_tod][curr]
                        previous = response['rates'][str_yest][curr]
                    except KeyError:
                        # if it hasnt been updated for today yet use yesterdays price
                        current = response['rates'][str_yest][curr]
                        previous = response['rates'][str_yestyest][curr]
                    # `previous` was called `yesterday` before, shadowing the
                    # datetime of the same name
                    change = current - previous
                    c_dict[symbol_base[i]] = {'current': current, '24hr_change': change}

        all_forex_settings['symbols'] = c_dict
        with open("csv/forex_settings.json", 'w+') as f:
            json.dump(all_forex_settings, f)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-05-21 13:24:37 +00:00
def updateNews():
    """Refresh the cached headlines in csv/news_settings.json via NewsAPI.

    Honours the user's source/country/category settings when present,
    falling back to global top headlines. At most `max_per_cat` headlines
    are stored as (title, source name, publishedAt) triples. Errors are
    logged to the module-level `logf` and swallowed.
    """
    max_per_cat = 10
    try:
        with open('csv/news_settings.json', 'r') as f:
            all_settings = json.load(f)
        try:
            # load user settings
            if all_settings['use_sources']:
                # NOTE(review): str() of the sources value — confirm it is already a comma string
                arg_dict = {'sources': str(all_settings['sources'])}
            else:
                arg_dict = {'country': all_settings['country'], 'category': all_settings['category']}
            response = newsapi.get_top_headlines(**arg_dict)
        except Exception as e:
            print('news settings not used', e)
            # if no settings just get top headlines
            response = newsapi.get_top_headlines()

        # The original truncated and indexed the response dict as if it were
        # the article list ("headlines['articles']" on a list), so the custom
        # settings path always failed. Work on the article list directly.
        articles = response['articles'][:max_per_cat]
        headline_titles = [article['title'] for article in articles]
        headline_sources = [article['source']['name'] for article in articles]
        headline_times = [article['publishedAt'] for article in articles]

        headlines = list(zip(headline_titles, headline_sources, headline_times))
        all_settings['headlines'] = headlines
        with open('csv/news_settings.json', 'w+') as f:
            json.dump(all_settings, f)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-07-08 18:42:21 +00:00
2021-05-21 13:24:37 +00:00
2021-06-27 11:07:47 +00:00
def updateWeather():
    """Fetch current conditions and the multi-day forecast for every
    configured location (union of both settings files) from the
    OpenWeatherMap one-call API, geocoding each location via GeoNames,
    then rewrite csv/current_weather.json and csv/daily_weather.json.

    Errors are logged to the module-level `logf` and swallowed.
    """
    max_cities = 30  # NOTE(review): not enforced below — confirm intended
    api_key = 'a9476947fa1a2f712076453bec4a0df5'
    try:
        gn = geocoders.GeoNames(username='fintic')

        with open('csv/daily_weather.json', 'r') as f:
            all_daily_settings = json.load(f)
        with open('csv/current_weather.json', 'r') as f:
            all_current_settings = json.load(f)

        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())
        all_locations = list(set(current_locations + daily_locations))

        current_weathers = {}
        daily_weathers = {}
        for location in all_locations:
            loc = gn.geocode(location)
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(loc.latitude, loc.longitude, api_key)
            r = requests.get(url)
            # parse the payload once (the original called r.json() repeatedly)
            data = r.json()
            weather = data['current']

            current_weather = {}
            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = data['daily'][0]['temp']['min']
            current_weather['max_temp'] = data['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = data['hourly'][0]['pop']

            current_weathers[location] = current_weather

            daily_weather = []
            for day in data['daily']:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)

            daily_weathers[location] = daily_weather

        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers
        with open("csv/current_weather.json", 'w+') as f:
            json.dump(all_current_settings, f)
        with open("csv/daily_weather.json", 'w+') as f:
            json.dump(all_daily_settings, f)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-06-09 18:06:21 +00:00
2021-06-19 09:53:09 +00:00
2021-09-25 09:40:29 +00:00
def updateLeagueTables(api_key, league_ids):
    """Fetch 2020-2021 standings from TheSportsDB for every league listed
    in csv/league_tables.json and rewrite that file with per-team rows.

    api_key -- TheSportsDB API key.
    league_ids -- mapping of league name -> TheSportsDB league id.

    Errors are logged to the module-level `logf` and swallowed.
    """
    with open('csv/league_tables.json', 'r') as f:
        all_settings = json.load(f)
    leagues = all_settings['leagues'].keys()
    leagues_info = {}
    try:
        for league in leagues:
            league_id = league_ids[league]
            url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

            r = requests.get(url)
            try:
                all_data = r.json()
            except Exception:  # there is no data available for this league
                continue
            teams = []
            for row in all_data['table']:
                team = {}
                team['name'] = row['strTeam']
                team['wins'] = row['intWin']
                team['loss'] = row['intLoss']
                team['draw'] = row['intDraw']
                team['played'] = row['intPlayed']
                team['standing'] = row['intRank']
                team['points'] = row['intPoints']
                teams.append(team)
            leagues_info[league] = teams
        all_settings['leagues'] = leagues_info
        # (original applied a no-op str.format() to this literal path)
        with open("csv/league_tables.json", 'w+') as f:
            json.dump(all_settings, f)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-07-08 18:42:21 +00:00
2021-06-17 19:06:23 +00:00
2021-09-25 09:40:29 +00:00
def updateLeagueEvents(api_key, league_ids, time):
    """Fetch past/upcoming/live events from TheSportsDB for every league
    listed in csv/{time}_games.json and rewrite that file.

    api_key -- TheSportsDB API key.
    league_ids -- mapping of league name -> TheSportsDB league id.
    time -- one of 'past', 'upcoming', 'live' (any other value raises
            NameError, as in the original).

    Errors are logged to the module-level `logf` and swallowed.
    """
    if time == 'past':
        url_template = 'https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'  # last 15 events on the league (premium only)
        f = open('csv/past_games.json')
    elif time == 'upcoming':
        url_template = 'https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'  # next 15 events on the league (premium only)
        f = open('csv/upcoming_games.json')
    elif time == 'live':
        f = open('csv/live_games.json')
        url_template = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'

    all_settings = json.load(f)
    f.close()
    leagues = all_settings['leagues'].keys()
    leagues_info = {}
    try:
        for league in leagues:
            league_id = league_ids[league]
            # Format into a fresh name: the original overwrote the template
            # (`url = url.format(...)`), so every league after the first
            # silently reused the first league's request URL.
            url = url_template.format(api_key, league_id)

            r = requests.get(url)
            try:
                all_data = r.json()
            except Exception:  # there is no data available for this league
                continue
            events = []
            if not all_data['events'] is None:
                for entry in all_data['events']:
                    event = {}
                    event['date'] = entry['dateEvent']
                    if time == 'live':
                        event['time'] = entry['strEventTime']
                        event['progess'] = entry['strProgress']
                        event['status'] = entry['strStatus']
                    else:
                        event['time'] = entry['strTime']
                    event['round'] = entry['intRound']
                    event['home_team'] = entry['strHomeTeam']
                    event['home_score'] = entry['intHomeScore']
                    event['away_team'] = entry['strAwayTeam']
                    event['away_score'] = entry['intAwayScore']
                    events.append(event)
            leagues_info[league] = events
        all_settings['leagues'] = leagues_info

        with open("csv/{}_games.json".format(time), 'w+') as out:
            json.dump(all_settings, out)

    except Exception as e:
        # log full traceback to the module-level log file and keep running
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
2021-07-08 18:42:21 +00:00
2021-06-19 09:53:09 +00:00
2021-06-27 11:07:47 +00:00
def updateSports():
    """Refresh all sports caches: league tables plus live, past and
    upcoming events for every supported league.

    NOTE(review): always updates every league; "read user settings to
    decide which sports to update" was a stated intent in the original
    but never implemented.
    """
    api_key = '97436974'

    league_ids = {'Premier League': '4328', 'NHL': '4380', 'NBA': '4387', 'NFL': '4391'}
    updateLeagueTables(api_key, league_ids)

    updateLeagueEvents(api_key, league_ids, 'live')
    updateLeagueEvents(api_key, league_ids, 'past')
    updateLeagueEvents(api_key, league_ids, 'upcoming')
    # (removed two dead trailing expressions left over from API testing:
    # an unused eventsnext URL string and an unused livescore `url` binding)
2021-06-17 19:06:23 +00:00
2021-06-14 19:36:17 +00:00
2021-06-27 11:07:47 +00:00
def checkStocks(last_update, update_frequency):
    """Decide whether stock data needs refreshing and trigger updateStocks().

    last_update -- naive NY-time datetime of the previous refresh.
    update_frequency -- minimum minutes between refreshes during market hours.
    Returns True when a refresh was performed.
    """
    now_ny = datetime.now(NY_zone).replace(tzinfo=None)
    market_open = now_ny.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    market_close = now_ny.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)

    with open('csv/stocks_settings.json', 'r') as f:
        all_stocks_settings = json.load(f)
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())

    minutes_since = (now_ny - last_update).total_seconds() / 60

    # weekday trading hours: refresh on schedule, or immediately if any
    # symbol has no cached data yet
    if market_open < now_ny < market_close and datetime.today().weekday() <= 4:
        if minutes_since >= update_frequency or emptyInfo(symbols, stock_info):
            updateStocks()
            return True
        return False

    # market closed: refresh once if the last update predates yesterday's close
    yday_close = market_close - dt.timedelta(days=1)
    yday_close = datetime.strptime(yday_close.strftime("%d/%m/%Y %H:%M:%S"), "%d/%m/%Y %H:%M:%S")
    if last_update < yday_close:
        updateStocks()
        return True
    return False
if __name__ == '__main__':

    # log file shared (as a global) by every update* function's error handler
    logf = open("log.txt", "w")
    t = time.time()

    max_stocks = 200
    max_crypto = 100

    # global NewsAPI client used by updateNews()
    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')

    # per-feed refresh intervals
    update_frequencies = {'stocks': 2, 'crypto': 10, 'news': 120, 'weather': 120, 'sports': 120}  #minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')

    NY_time = datetime.now(NY_zone)
    CET_time = datetime.now(CET_zone)

    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    # NOTE(review): formats NY_time, not CET_time — confirm intended
    CET_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")

    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()

    try:
        f = open('csv/last_updates.json', 'r')
        last_updates = json.load(f)
        f.close()
    except:
        # first run (or unreadable file): seed every feed with a timestamp far
        # enough in the past that it refreshes immediately
        last_updates = {"stocks": "27/06/2021 07:05:39", "crypto": "27/06/2021 07:05:39", "news": "27/06/2021 07:05:39", "weather": "27/06/2021 07:05:39", "forex": "27/06/2021 07:05:39", "sports": "27/06/2021 07:05:39"}

    t = time.time()

    try:
        while True:
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            # drain any pending stdin character (non-blocking); value unused
            msg = getInput()

            #stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            if checkStocks(stock_time, stock_frequency):
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks'] = stock_time

            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds() / 60  #minutes
            if diff >= update_frequencies['crypto']:
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto()
                last_updates['crypto'] = crypto_time

            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds() / 60  #minutes
            if diff >= update_frequencies['weather']:
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather()
                last_updates['weather'] = weather_time

            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds() / 60  #minutes
            if diff >= update_frequencies['news']:
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews()
                last_updates['news'] = news_time

            # sports
            sports_time = datetime.strptime(last_updates['sports'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds() / 60  #minutes
            if diff >= update_frequencies['sports']:
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateSports()
                last_updates['sports'] = sports_time

            #forex updates once every 24hours at 1700 CET
            # update if last update was before the previous days closing
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
            if forex_time < yday_update:
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex'] = forex_time
                updateForex()

            # persist refresh timestamps so a restart doesn't re-fetch everything
            json.dump(last_updates, open('csv/last_updates.json', 'w+'))

    except Exception as e:
        # top-level failure: log full traceback and exit the loop
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]

        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))