database caller functions added for stocks, forex and news
parent 9056c958e5
commit a25c82128c
database_caller.py (executable file, +752 lines)
@ -0,0 +1,752 @@
# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code can not be copied and/or distributed without the express
# permission of Fintic

import finnhub
import time
import csv
import pytz
from datetime import datetime, timedelta
import json
import datetime as dt
import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders


def getInput(Block=False):
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        msg = sys.stdin.read(1)
        #sys.stdin.flush()
    else:
        msg = ''
    return msg
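
# getInput polls stdin without blocking: it returns a single character when one
# is waiting ('A', 's', 'c', 'w', 'n', 'S' and 'f' act as manual refresh
# commands in the main loop below) and '' otherwise; Block=True waits instead.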


def emptyInfo(symbols, stock_info):
    update = False
    for symbol in symbols:
        if stock_info[symbol] == -1:  # stock with no info
            update = True
    return update


def updateUpdate(NY_time):
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    f = open('csv/last_update.csv', 'w+')
    f.write(NY_str + '\n')
    f.close()


def updateStocksFinhubb():
    max_stocks = 200
    finnhubsandboxAPIkey = "sandbox_c24qddqad3ickpckgg8g"  # Finnhub
    finnhubAPIkey = "c24qddqad3ickpckgg80"  # Finnhub
    finnhubClient = finnhub.Client(api_key=finnhubAPIkey)
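
    # NOTE: readJSON is not defined in this file; it is assumed to be a helper
    # available at runtime that returns (symbols, cached_info) for the path.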
    symbols, stock_info = readJSON('csv/tickers.csv', max_stocks)
    try:
        quotes = [finnhubClient.quote(symbol) for symbol in symbols]
        current_prices = [quote['c'] for quote in quotes]
        opening_prices = [quote['o'] for quote in quotes]

        CSV = open('csv/tickers.csv', 'w+')
        CSV.write('name,current,opening\n')
        for i, symbol in enumerate(symbols):
            CSV.write(symbol + ',' + str(current_prices[i]) + ',' + str(opening_prices[i]) + '\n')
        CSV.close()

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
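

# The identical traceback-logging block recurs in every updater below. As a
# sketch it could be factored into a helper like this (left unused here so the
# original control flow stays untouched; call it from inside an except block,
# passing the global 'logf' handle opened in __main__):
def logException(e, logf):
    exc_type, exc_obj, exc_tb = sys.exc_info()
    fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
    logf.write(str(e))
    logf.write('. file: ' + fname)
    logf.write('. line: ' + str(exc_tb.tb_lineno))
    logf.write('. type: ' + str(exc_type))
    logf.write('\n ' + "".join(traceback.format_exception(*sys.exc_info())))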


def updateStocks():
    try:
        max_stocks = 200

        f = open('csv/stocks_settings.json', 'r')
        all_stocks_settings = json.load(f)
        f.close()
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())
        print(symbols)

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
        for symbol in symbols:
            url += symbol + ','
        response = requests.get(url)
        data = response.json()

        stock_info = {}
        for stock in data:
            stock_info[stock['symbol']] = {'current': stock['price'], 'opening': float(stock['price']) - float(stock['change_since'])}

        all_stocks_settings['symbols'] = stock_info

        json.dump(all_stocks_settings, open('csv/stocks_settings.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
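
# updateStocks rewrites csv/stocks_settings.json so 'symbols' maps each ticker
# to its prices, e.g. {"symbols": {"AAPL": {"current": 143.16,
# "opening": 142.25}}} (illustrative values).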


def updateCrypto():
    # crypto_info['symbol,base'].keys() = ['current', '24hr_change']
    try:
        # debug probe, disabled: the sys.exit() here would kill the process
        #url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols=ETH-BTC'
        #response = requests.get(url)
        #data = response.json()
        #print(data)
        #sys.exit()
        #stock_info = {}
        #for stock in data:
        #    stock_info[stock['symbol']] = {'current': stock['price'], 'opening': float(stock['price']) - float(stock['change_since'])}

        coingecko_client = CoinGeckoAPI()

        f = open('csv/crypto_settings.json', 'r')
        all_crypto_settings = json.load(f)
        f.close()

        coin_info = all_crypto_settings['symbols']
        symbol_base = list(coin_info.keys())

        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))

        coins = []

        # coingecko rate limited me from calling this too often
        #coin_list = coingecko_client.get_coins_list()
        #json.dump(coin_list, open('csv/coin_list.json', 'w+'))

        f = open('coin_list.json', 'r')
        coin_list = json.load(f)
        f.close()

        # this might be super slow as coin_list is large
        for s in symbols:
            for c in coin_list:
                if c['symbol'].upper() == s and c['id'] != 'binance-peg-cardano':  # hackaround for two coins with symbol ada
                    coins.append(c['id'])

        crypto_info = {}
        print(coins)
        response = coingecko_client.get_price(ids=','.join(coins), vs_currencies=unique_bases, include_24hr_change=True)

        #print(response)

        for i, sb in enumerate(symbol_base):
            #coin_info[name] = [symbol, base]
            #info = coin_info[coin]
            #CSV.write(info[0] + ',' + coin + ',' + info[1] + ',' + str(response[coin][info[1]]) + ',' + str(response[coin]['usd_24h_change']) + '\n')
            crypto_info[sb] = {'current': response[coins[i]][bases[i].lower()], '24hr_change': response[coins[i]]['usd_24h_change']}

        all_crypto_settings['symbols'] = crypto_info

        json.dump(all_crypto_settings, open('csv/crypto_settings.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
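
# crypto_settings.json keys are 'SYMBOL,BASE' strings (e.g. 'BTC,USD'), each
# mapping to {'current': ..., '24hr_change': ...}. Note the cached list is
# read from 'coin_list.json' while the commented-out dump above writes
# 'csv/coin_list.json'; one of the two paths looks stale.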


def updateForex():
    try:
        f = open('csv/forex_settings.json', 'r')
        all_forex_settings = json.load(f)
        f.close()

        forex_info = all_forex_settings['symbols']
        symbol_base = list(forex_info.keys())

        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))

        targets = ','.join(symbols)

        data = []
        for base in unique_bases:
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?base={}&targets='.format(base) + targets
            response = requests.get(url)
            data.extend(response.json())

        c_dict = {}
        for i, sb in enumerate(symbol_base):
            # check the symbol/base pair has data and if so add it to c_dict
            for d in data:
                if d['uid'] == bases[i] + '/' + symbols[i]:
                    c_dict[sb] = {'current': d['rate'], '24hr_change': d['rate_over_24hr']}

        all_forex_settings['symbols'] = c_dict
        json.dump(all_forex_settings, open("csv/forex_settings.json", 'w+'))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
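
# Forex records are matched on their 'uid' of the form 'BASE/SYMBOL', so a
# settings key 'EUR,USD' picks up the record with uid 'USD/EUR' and stores its
# rate plus 24-hour change.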


def updateNews():
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category=technology'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country=GB'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?lang=en'

    max_per_cat = 10
    try:
        all_settings = json.load(open('csv/news_settings.json', 'r'))

        try:
            # load user settings
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(all_settings['country'])
            response = requests.get(url)
            data = response.json()

            headlines = data
        except Exception as e:
            print('news settings not used', e)
            # if no settings just get top headlines
            headlines = newsapi.get_top_headlines()

        headline_titles = [headline['title'] for headline in headlines['articles']]
        headline_sources = [headline['source']['name'] for headline in headlines['articles']]
        headline_times = [headline['publishedAt'] for headline in headlines['articles']]

        headlines = list(zip(headline_titles, headline_sources, headline_times))
        all_settings['headlines'] = headlines

        json.dump(all_settings, open('csv/news_settings.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
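
# Headlines are saved as (title, source_name, published_at) tuples, e.g.
# ('Example headline', 'BBC News', '2021-06-27T07:05:39Z') (illustrative).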


def updateWeather():
    max_cities = 30
    api_key = 'a9476947fa1a2f712076453bec4a0df5'
    try:
        gn = geocoders.GeoNames(username='fintic')

        f = open('csv/daily_weather.json', 'r')
        all_daily_settings = json.load(f)
        f.close()

        f = open('csv/current_weather.json', 'r')
        all_current_settings = json.load(f)
        f.close()

        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())

        all_locations = list(set(current_locations + daily_locations))

        # key results by location name so the (unordered) set above cannot
        # misalign locations and their weather
        current_weathers = {}
        daily_weathers = {}

        for location in all_locations:
            loc = gn.geocode(location)
            current_weather = {}

            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)

            weather = r.json()['current']

            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
            current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = r.json()['hourly'][0]['pop']

            if location in current_locations:
                current_weathers[location] = current_weather

            daily_weather = []
            daily = r.json()['daily']

            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)

            # add relevant current information to the first day in daily
            daily_weather[0]['temp'] = weather['temp']
            daily_weather[0]['rain_chance'] = current_weather['rain_chance']
            daily_weather[0]['humidity'] = current_weather['humidity']
            daily_weather[0]['wind_speed'] = current_weather['wind_speed']
            daily_weather[0]['uv'] = current_weather['uv']
            daily_weather[0]['clouds'] = current_weather['clouds']
            daily_weather[0]['wind_direction'] = current_weather['wind_direction']
            daily_weather[0]['visibility'] = current_weather['visibility']

            if location in daily_locations:
                daily_weathers[location] = daily_weather

        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers

        json.dump(all_current_settings, open("csv/current_weather.json", 'w+'))
        json.dump(all_daily_settings, open("csv/daily_weather.json", 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
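
# Each current_weather entry carries the One Call fields used downstream:
# main_weather, description, temp, min_temp, max_temp, feels_like, humidity,
# clouds, wind_speed, wind_direction, visibility, uv and rain_chance.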


def updateLeagueTables(api_key, league_ids):
    try:
        f = open('csv/league_tables.json', 'r')
        all_settings = json.load(f)
        f.close()

        leagues = all_settings['leagues'].keys()
        leagues_info = {}
        for league in leagues:
            league_id = league_ids[league]
            url = 'https://www.thesportsdb.com/api/v1/json/{}/lookuptable.php?l={}&s=2020-2021'.format(api_key, league_id)

            r = requests.get(url)
            try:
                all_data = r.json()
            except Exception as e:  # there is no data available
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                logf.write(str(e))
                logf.write('. file: ' + fname)
                logf.write('. line: ' + str(exc_tb.tb_lineno))
                logf.write('. type: ' + str(exc_type))
                logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
                logf.write(url)
                continue

            teams = []

            for i in range(len(all_data['table'])):
                team = {}
                team['name'] = all_data['table'][i]['strTeam']
                team['wins'] = all_data['table'][i]['intWin']
                team['loss'] = all_data['table'][i]['intLoss']
                team['draw'] = all_data['table'][i]['intDraw']
                team['played'] = all_data['table'][i]['intPlayed']
                team['standing'] = all_data['table'][i]['intRank']
                team['points'] = all_data['table'][i]['intPoints']

                teams.append(team)
            leagues_info[league] = teams
        all_settings['leagues'] = leagues_info
        json.dump(all_settings, open("csv/league_tables.json", 'w+'))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
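
# league_tables.json then maps each league name to an ordered list of team
# dicts with name, wins, loss, draw, played, standing and points (shape only).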


def updateLeagueEvents(api_key, league_ids, time):
    if time == 'past':
        url = 'https://www.thesportsdb.com/api/v1/json/{}/eventspastleague.php?id={}'  # last 15 events in the league (premium only)
        f = open('csv/past_games.json')
    elif time == 'upcoming':
        url = 'https://www.thesportsdb.com/api/v1/json/{}/eventsnextleague.php?id={}'  # next 15 events in the league (premium only)
        f = open('csv/upcoming_games.json')
    elif time == 'live':
        f = open('csv/live_games.json')
        url = 'https://thesportsdb.com/api/v2/json/{}/livescore.php?l={}'

    try:
        all_settings = json.load(f)
        f.close()
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
        for league in leagues:
            league_id = league_ids[league]
            full_url = url.format(api_key, league_id)

            r = requests.get(full_url)
            try:
                all_data = r.json()
            except Exception as e:  # there is no data available
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                logf.write(str(e))
                logf.write('. file: ' + fname)
                logf.write('. line: ' + str(exc_tb.tb_lineno))
                logf.write('. type: ' + str(exc_type))
                logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
                logf.write(full_url)
                continue

            events = []

            if all_data['events'] is not None:
                for i in range(len(all_data['events'])):
                    event = {}
                    event['date'] = all_data['events'][i]['dateEvent']

                    if time == 'live':
                        event['time'] = all_data['events'][i]['strEventTime']
                        event['progess'] = all_data['events'][i]['strProgress']
                        event['status'] = all_data['events'][i]['strStatus']
                    else:
                        event['time'] = all_data['events'][i]['strTime']
                        event['round'] = all_data['events'][i]['intRound']
                    event['home_team'] = all_data['events'][i]['strHomeTeam']
                    event['home_score'] = all_data['events'][i]['intHomeScore']
                    event['away_team'] = all_data['events'][i]['strAwayTeam']
                    event['away_score'] = all_data['events'][i]['intAwayScore']

                    events.append(event)
            leagues_info[league] = events
        all_settings['leagues'] = leagues_info

        json.dump(all_settings, open("csv/{}_games.json".format(time), 'w+'))
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
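
# 'past' and 'upcoming' use TheSportsDB v1 endpoints while 'live' uses the v2
# livescore endpoint, whose events expose strEventTime/strProgress/strStatus
# rather than strTime/intRound, hence the branch above.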


def updateSports():
    # read user settings to decide which sports to update
    api_key = '97436974'

    league_ids = {'Premier League': '4328', 'NHL': '4380', 'NBA': '4387', 'NFL': '4391'}
    updateLeagueTables(api_key, league_ids)

    updateLeagueEvents(api_key, league_ids, 'live')
    updateLeagueEvents(api_key, league_ids, 'past')
    updateLeagueEvents(api_key, league_ids, 'upcoming')

    # reference endpoints, not called here:
    #'https://www.thesportsdb.com/api/v1/json/{}/eventsnext.php?id=133602'.format(api_key)  # next five events by team ID (paid), use this for upcoming team games
    #url = 'https://www.thesportsdb.com/api/v1/json/{}/eventsseason.php?id=4328&s=2020-2021'.format(api_key)  # all past events in premier league
    #url = 'https://www.thesportsdb.com/api/v2/json/{}/livescore.php?l=4380'.format(api_key)  # live scores


def checkStocks(last_update, update_frequency):
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)

    f = open('csv/stocks_settings.json', 'r')
    all_stocks_settings = json.load(f)
    f.close()
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())

    updated = False

    diff = (NY_time - last_update).total_seconds() / 60  # minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5:  # we need to do real time updating
        if diff >= update_frequency:
            updated = True
            updateStocks()

    elif emptyInfo(symbols, stock_info):  # if there are any stocks with no info
        updated = True
        updateStocks()

    else:
        # update if last update was before the previous day's closing
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")

        if last_update < yday_closing:
            updated = True
            updateStocks()

    return updated
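
# checkStocks refreshes on the update_frequency interval during NYSE hours
# (09:30-16:00 New York time, Mon-Fri), immediately when a symbol has no
# cached info, and otherwise only if the cache predates the previous close.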


def updateAll():
    updateStocks()
    updateCrypto()
    updateForex()
    updateNews()
    updateSports()
    updateWeather()


if __name__ == '__main__':
    logf = open("log.txt", "a")

    t = time.time()

    # debug shortcut, disabled: running updateNews() then sys.exit() here
    # would end the process before the main loop below ever starts
    #updateNews()
    #sys.exit()

    newsapi = NewsApiClient(api_key='cf08652bd17647b89aaf469a1a8198a9')

    update_frequencies = {'stocks': 1, 'crypto': 5, 'forex': 60, 'news': 120, 'weather': 120, 'sports': 120}  # minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')

    NY_time = datetime.now(NY_zone)

    CET_time = datetime.now(CET_zone)

    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    CET_str = CET_time.strftime("%d/%m/%Y %H:%M:%S")

    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()

    try:
        f = open('csv/last_updates.json', 'r')
        last_updates = json.load(f)
        f.close()

    except:
        last_updates = {"stocks": "27/06/2021 07:05:39", "crypto": "27/06/2021 07:05:39", "news": "27/06/2021 07:05:39", "weather": "27/06/2021 07:05:39", "forex": "27/06/2021 07:05:39", "sports": "27/06/2021 07:05:39"}

    t = time.time()

    try:
        while True:

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)

            msg = getInput()
            if msg == 'A':
                updateAll()

            # stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            if checkStocks(stock_time, stock_frequency) or msg == 's':
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks'] = stock_time
                updateStocks()

            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds() / 60  # minutes
            if diff >= update_frequencies['crypto'] or msg == 'c':
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto()
                last_updates['crypto'] = crypto_time

            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds() / 60  # minutes
            if diff >= update_frequencies['weather'] or msg == 'w':
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather()
                last_updates['weather'] = weather_time

            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds() / 60  # minutes
            if diff >= update_frequencies['news'] or msg == 'n':
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews()
                last_updates['news'] = news_time

            # sports
            sports_time = datetime.strptime(last_updates['sports'], "%d/%m/%Y %H:%M:%S")

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds() / 60  # minutes
            if diff >= update_frequencies['sports'] or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateSports()
                last_updates['sports'] = sports_time

            # forex updates once every 24 hours at 17:00 CET

            # update if last update was before the previous day's closing
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone)
            yday_update = (CET_time.replace(hour=17, minute=0, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
            diff = (CET_time.replace(tzinfo=None) - forex_time).total_seconds() / 60

            opening = CET_time.replace(hour=17, minute=0, second=0, microsecond=0).replace(tzinfo=None)

            # forex updates every hour between 5pm Sunday and 5pm Friday
            forex_open = datetime.today().weekday() < 4 or (datetime.today().weekday() == 6 and CET_time.replace(tzinfo=None) > opening) or (datetime.today().weekday() == 4 and CET_time.replace(tzinfo=None) < opening)

            if forex_time < yday_update or msg == 'f' or (diff >= update_frequencies['forex'] and forex_open):
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex'] = forex_time
                updateForex()

            json.dump(last_updates, open('csv/last_updates.json', 'w+'))

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))