# fintic-tracker/database_caller.py
# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code can not be copied and/or distributed without the express
# permission of Fintic
import finnhub
import time
import csv
import pytz
from datetime import datetime, timedelta
import subprocess
import json
import urllib.request
import datetime as dt
import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders
from multiprocessing import Process
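# On startup, wait ~80 s (presumably for the network and clock to settle), then
# force an immediate refresh of stocks, pre/post-market and live sports on the
# first pass of the main loop.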
try:
time.sleep(80)
f = open('csv/last_updates.json', 'r')
last_updates = json.load(f)
f.close()
last_updates['stocks']['force'] = True
last_updates['prepost']['force'] = True
last_updates['sports_l']['force'] = True
f = open('csv/last_updates.json', 'w')
json.dump(last_updates, f)
f.close()
except:
pass
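# Load the shutdown/reboot schedule; fall back to "disabled" defaults if the
# file is missing or malformed.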
try:
f = open('csv/scheduler.json', 'r')
schedules = json.load(f)
f.close()
shutdown_schedule_hour = schedules['shutdown']['hour']
shutdown_schedule_minute = schedules['shutdown']['minute']
reboot_schedule_hour = schedules['reboot']['hour']
reboot_schedule_minute = schedules['reboot']['minute']
timezone = schedules['timezone']
shutdown_enabled = schedules['shutdown']['enabled']
reboot_enabled = schedules['reboot']['enabled']
except:
shutdown_schedule_hour = "00"
shutdown_schedule_minute = "00"
reboot_schedule_hour = "00"
reboot_schedule_minute = "00"
timezone = "GMT"
shutdown_enabled = False
reboot_enabled = False
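# Non-blocking single-character read from stdin; select() returns immediately
# when no input is pending.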
def getInput(Block=False):
if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
msg = sys.stdin.read(1)
#sys.stdin.flush()
else:
msg = ''
return msg
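# True if any tracked symbol still holds the -1 placeholder, i.e. has never
# been populated with real data.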
def emptyInfo(symbols, stock_info):
    for symbol in symbols:
        if stock_info[symbol] == -1:  # stock with no info
            return True
    return False
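# Record the time of the last completed update cycle (NY time).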
def updateUpdate(NY_time):
NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
f = open('csv/last_update.csv', 'w+')
f.write(NY_str + '\n')
f.close()
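# Fetch current price/change/percent for every symbol in
# csv/stocks_settings.json from the Fintic stocks endpoint and write the
# results back to the same file.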
def updateStocks(api_key, logf):
try:
        max_stocks = 200  # note: currently unused
f = open('csv/stocks_settings.json', 'r')
all_stocks_settings = json.load(f)
f.close()
stock_info = all_stocks_settings['symbols']
symbols = list(stock_info.keys())
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
for symbol in symbols:
url += symbol + ','
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
# stock_info = {}
if len(data) > 0:
for symbol in symbols:
for stock in data:
if stock['symbol'] == symbol:
stock_info[stock['symbol']] = {'current': stock['price'], 'change': stock['change_since'], 'percent_change':stock['percent']}
all_stocks_settings['symbols'] = stock_info
f = open('csv/stocks_settings.json', 'w+')
json.dump(all_stocks_settings, f)
f.close()
except:
pass
#logf = open('log.txt', "a")
#exc_type, exc_obj, exc_tb = sys.exc_info()
#fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
#logf.write(str(e))
#logf.write('. file: ' + fname)
#logf.write('. line: ' + str(exc_tb.tb_lineno))
#logf.write('. type: ' + str(exc_type))
#logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
#logf.close()
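# Fetch pre- and post-market quotes from Yahoo Finance (v7, falling back to v6
# when v7 responds "Unauthorized") and write them to csv/prepost_settings.json.
# When a pre/post price is missing, the last available price is reused with a
# 0.00 change.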
def updateStocksPrePost(api_key, logf):
try:
f = open('csv/stocks_settings.json', 'r')
all_stocks_settings = json.load(f)
f.close()
stock_info = all_stocks_settings['symbols']
symbols = list(stock_info.keys())
#KEEP THIS JUST IN CASE V7 GOES DOWN prepost_url = 'https://query2.finance.yahoo.com/v6/finance/quote?symbols='
prepost_url = 'https://query2.finance.yahoo.com/v7/finance/quote?symbols='
for symbol in symbols:
prepost_url += symbol + ','
prepost_url += '&fields=regularMarketPreviousClose,regularMarketPrice,preMarketPrice,preMarketChangePercent,regularMarketChangePercent,regularMarketChange,preMarketChange,postMarketPrice,postMarketChange,postMarketChangePercent&region=US&lang=en-US'
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'
}
prepost = requests.get(prepost_url, headers=headers)
if 'Unauthorized' in str(prepost.json()):
prepost = requests.get(prepost_url.replace('v7','v6'), headers=headers)
prepost_data = prepost.json()['quoteResponse']['result']
time_now = datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M EST")
if len(prepost_data) > 0:
for symbol in symbols:
for stock in prepost_data:
if stock['symbol'] == symbol:
stock_info[stock['symbol']] = {"time_now":time_now}
try:
stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['preMarketPrice'],
'prechange': '%.2f' % stock['preMarketChange'],
'prepercent': '%.2f' % stock['preMarketChangePercent']}
except:
try:
stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['postMarketPrice'],
'prechange': '%.2f' % 0,
'prepercent': '%.2f' % 0}
except:
stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['regularMarketPrice'],
'prechange': '%.2f' % 0,
'prepercent': '%.2f' % 0}
try:
stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['postMarketPrice'],
'postchange': '%.2f' % stock['postMarketChange'],
'postpercent': '%.2f' % stock['postMarketChangePercent']}
except:
stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['regularMarketPrice'],
'postchange': '%.2f' % 0,
'postpercent': '%.2f' % 0}
all_stocks_settings['symbols'] = stock_info
with open('csv/prepost_settings.json', 'w+') as f:
json.dump(all_stocks_settings['symbols'], f)
except:
pass
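# Fetch commodity prices (price, unit, 24 h change) for the symbols in
# csv/commodities_settings.json and write them back to the same file.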
def updateCommodities(api_key, logf):
try:
f = open('csv/commodities_settings.json', 'r')
all_commodities_settings = json.load(f)
f.close()
commodity_info = all_commodities_settings['symbols']
symbols = list(commodity_info.keys())
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/commodities?symbols='
for symbol in symbols:
url += symbol + ','
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
commodity_info = {}
if len(data) > 0:
for symbol in symbols:
for commodity in data:
if commodity['symbol'] == symbol:
commodity_info[commodity['symbol']] = {'current': commodity['price'], 'unit': commodity['unit'], '24hr_change': commodity['price_over_24hr'], 'percent_change': commodity['percent_over_24hr']}
all_commodities_settings['symbols'] = commodity_info
f = open('csv/commodities_settings.json', 'w+')
json.dump(all_commodities_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
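# Format a number with a metric suffix using 3 significant figures,
# e.g. human_format(1234000) -> '1.23M', human_format(950) -> '950'.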
def human_format(num):
num = float('{:.3g}'.format(num))
magnitude = 0
while abs(num) >= 1000:
magnitude += 1
num /= 1000.0
return '{}{}'.format('{:f}'.format(num).rstrip('0').rstrip('.'), ['', 'K', 'M', 'B', 'T'][magnitude])
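# Pull today's trending movies/TV from TMDB, resolve genre ids to names, cache
# backdrop images under logos/movies/, and write everything to
# csv/movie_settings.json. Assumes 'category' is one of 'Popular Movies',
# 'Popular TV' or 'Popular All'; any other value leaves url unset and raises.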
def updateMovies(api_key, logf):
f = open('csv/movie_settings.json', 'r')
all_settings = json.load(f)
f.close()
if all_settings['category'] == 'Popular Movies':
url = 'https://api.themoviedb.org/3/trending/movie/day?'
movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
movieGenre_response = requests.get(movieGenre_url)
movie_genres = movieGenre_response.json()
elif all_settings['category'] == 'Popular TV':
url = 'https://api.themoviedb.org/3/trending/tv/day?'
tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
tvGenre_response = requests.get(tvGenre_url)
tv_genres = tvGenre_response.json()
elif all_settings['category'] == 'Popular All':
url = 'https://api.themoviedb.org/3/trending/all/day?'
movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
movieGenre_response = requests.get(movieGenre_url)
movie_genres = movieGenre_response.json()
tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
tvGenre_response = requests.get(tvGenre_url)
tv_genres = tvGenre_response.json()
url += 'api_key=' + api_key
response = requests.get(url)
data = response.json()
this_out = []
logo_files = []
if len(data) > 0:
movies = data['results']
for movie in movies:
if movie['media_type'] == 'movie':
movie_id = movie['id']
box_office_url = 'https://api.themoviedb.org/3/movie/' + str(movie_id) + '?api_key=' + api_key
box_office_response = requests.get(box_office_url)
box_office_data = box_office_response.json()
budget = human_format(box_office_data['budget'])
revenue = human_format(box_office_data['revenue'])
else:
budget = '0'
revenue = '0'
movie_language = movie['original_language']
movie_votes = movie['vote_average']
movie_votes = "{:.1f}".format(movie_votes)
try:
movie_titles = movie['title']
movie_date = movie['release_date']
except KeyError:
movie_titles = movie['name']
movie_date = movie['first_air_date']
movie_type = movie['media_type']
movie_genre = movie['genre_ids']
movie_logo = 'https://image.tmdb.org/t/p/w500' + movie['backdrop_path']
genrefinal = []
if all_settings['category'] == 'Popular Movies':
for i in movie_genre:
for genre in movie_genres['genres']:
if genre['name'] == 'Science Fiction':
genre['name'] = 'Sci-Fi'
if i == genre['id']:
i = genre['name']
genrefinal.append(i)
elif all_settings['category'] == 'Popular TV':
for i in movie_genre:
for genre in tv_genres['genres']:
if i == genre['id']:
i = genre['name']
genrefinal.append(i)
elif all_settings['category'] == 'Popular All':
if movie['media_type'] == 'movie':
for i in movie_genre:
for genre in movie_genres['genres']:
if genre['name'] == 'Science Fiction':
genre['name'] = 'Sci-Fi'
if i == genre['id']:
i = genre['name']
genrefinal.append(i)
elif movie['media_type'] == 'tv':
for i in movie_genre:
for genre in tv_genres['genres']:
if i == genre['id']:
i = genre['name']
genrefinal.append(i)
this_out.append({'title':movie_titles,
'language':movie_language.upper(),
'votes':str(movie_votes),
'date':movie_date,
'media_type':movie_type.capitalize(),
'genre':genrefinal,
'budget':budget,
'revenue':revenue,
'backdrop':movie['backdrop_path'][1:],
'logo': movie_logo
})
            logo_files.append(movie['backdrop_path'][1:])
            if movie['backdrop_path'][1:] not in os.listdir('logos/movies/'):
                urllib.request.urlretrieve(movie_logo, 'logos/movies/' + movie['backdrop_path'][1:])
                time.sleep(0.5)
for file in os.listdir('logos/movies/'):
if file not in logo_files:
os.remove('logos/movies/'+file)
all_settings['movies'] = this_out
f = open('csv/movie_settings.json', 'w+')
json.dump(all_settings, f)
f.close()
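# Fetch the IPO calendar from Finnhub for a three-week window starting on
# Monday of the current week.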
def updateIpo(api_key, logf):
day = datetime.now(pytz.utc).strftime("%Y-%m-%d")
    today = datetime.strptime(day, "%Y-%m-%d")  # renamed to avoid shadowing the 'dt' module alias
    start = today - timedelta(days=today.weekday())  # Monday of the current week
start_date = start.strftime("%Y-%m-%d")
end = start + timedelta(days=21)
end_date = end.strftime("%Y-%m-%d")
ipo_url = 'https://finnhub.io/api/v1/calendar/ipo?from='+start_date+'&to='+end_date+'&token='+api_key
f = open('csv/ipo_settings.json', 'r')
ipo_settings = json.load(f)
f.close()
data = requests.get(ipo_url)
all_ipo = data.json()
ipo_list = []
try:
if len(all_ipo['ipoCalendar']) > 0:
for ipo in all_ipo['ipoCalendar']:
try:
shares = human_format(ipo['numberOfShares'])
except:
shares = 'N/A'
try:
sharesvalue = human_format(ipo['totalSharesValue'])
except:
sharesvalue = 'N/A'
ipo_list.append({
'date':ipo['date'],
'name':ipo['name'],
'shares':shares,
'price':ipo['price'],
'status':ipo['status'],
'symbol':ipo['symbol'],
'sharesvalue':sharesvalue
})
else:
ipo_list = ['No Data']
except:
ipo_list = ['No Data']
ipo_settings['symbols'] = ipo_list
f = open('csv/ipo_settings.json', 'w+')
json.dump(ipo_settings, f)
f.close()
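# Fetch index quotes (name, price, point and percent change) for the symbols
# in csv/indices_settings.json and write them back to the same file.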
def updateIndices(api_key, logf):
try:
f = open('csv/indices_settings.json', 'r')
all_indices_settings = json.load(f)
f.close()
index_info = all_indices_settings['symbols']
symbols = list(index_info.keys())
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/indices?symbols='
for symbol in symbols:
url += symbol + ','
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
index_info = {}
if len(data) > 0:
for symbol in symbols:
for index in data:
if index['symbol'] == symbol:
index_info[index['symbol']] = {'name': index['name'], 'current': index['price'], 'point_change': index['change'], 'percent_change': index['percent_change']}
all_indices_settings['symbols'] = index_info
f = open('csv/indices_settings.json', 'w+')
json.dump(all_indices_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
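# Fetch crypto prices. Keys in csv/crypto_settings.json are stored as
# 'SYMBOL,BASE' (e.g. 'BTC,USD'); the API expects BASE-SYMBOL pairs.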
def updateCrypto(api_key, logf):
try:
f = open('csv/crypto_settings.json', 'r')
all_crypto_settings = json.load(f)
f.close()
coin_info = all_crypto_settings['symbols']
symbol_base = list(coin_info.keys())
symbols = [sb.split(',')[0] for sb in symbol_base]
bases = [sb.split(',')[1] for sb in symbol_base]
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols='
for i,s in enumerate(symbols):
url += bases[i] + '-' + s + ','
url = url[:-1] #remove last comma
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
coin_info = {}
if len(data) > 0:
for sb in symbol_base:
for i,d in enumerate(data):
symbol = d['symbol']
base = d['currency']
if symbol.upper() + ',' + base.upper() == sb:
coin_info[symbol.upper() + ',' + base.upper()] = {'current': d['price'], '24hr_change': d['price_over_24hr'], 'percent_change': d['percent_over_24hr']}
all_crypto_settings['symbols'] = coin_info
f = open('csv/crypto_settings.json', 'w+')
json.dump(all_crypto_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
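# Fetch forex rates. Keys in csv/forex_settings.json are 'TARGET,BASE' pairs;
# the API returns uids of the form 'TARGET/BASE'.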
def updateForex(api_key, logf):
try:
f = open('csv/forex_settings.json', 'r')
all_forex_settings = json.load(f)
f.close()
forex_info = all_forex_settings['symbols']
symbol_base = list(forex_info.keys())
symbols = [sb.split(',')[0] for sb in symbol_base]
bases = [sb.split(',')[1] for sb in symbol_base]
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?symbols='
for i,s in enumerate(symbols):
url += s + '-' + bases[i] + ','
url = url[:-1] #remove last comma
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
if len(data) > 0:
c_dict = {}
for sb in symbol_base:
for d in data:
if d['uid'].replace('/',',') == sb:
c_dict[d['uid'].replace('/',',')] = {'current': d['rate'], '24hr_change': d['rate_over_24hr'], 'percent_change':d['percent_over_24hr']}
all_forex_settings['symbols'] = c_dict
f = open( "csv/forex_settings.json", 'w+' )
json.dump(all_forex_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
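# Fetch news headlines, either by country (or 'Worldwide') or by category,
# trim to the configured number of headlines, and store
# (title, source, publishedAt) tuples.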
def updateNews(api_key, logf):
try:
f = open('csv/news_settings.json', 'r')
all_settings = json.load(f)
f.close()
if all_settings['use_country']:
if all_settings['country'] == 'Worldwide':
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
else:
c_dict = {'United States':'US', 'Australia':'AU', 'Canada': 'CA', 'Great Britain':'GB', 'New Zealand':'NZ', 'Ireland':'IE', 'Singapore':'SG', 'South Africa': 'ZA', 'Germany': 'DE', 'Hong Kong': 'HK', 'Japan': 'JP', 'South Korea': 'KR', 'China': 'CN', 'France': 'FR', 'India': 'IN', 'Italy': 'IT', 'Switzerland': 'CH', 'Netherlands': 'NL', 'Spain': 'ES', 'Brazil': 'BR', 'Portugal': 'PT'}
cc = c_dict[all_settings['country']]
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
elif all_settings['use_category']:
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category={}'.format(all_settings['category'])
url += '&apiKey=' + api_key
response = requests.get(url)
data = response.json()
if len(data) > 0:
max_headlines = int(all_settings['num_headlines'])
#load user settings
headlines = data[:max_headlines]
headline_sources = [headline['source'] for headline in headlines]
headline_titles = [headline['title'] for headline in headlines]
headline_times = [headline['publishedAt'] for headline in headlines]
headlines = list(zip(headline_titles, headline_sources, headline_times))
all_settings['headlines'] = headlines
f = open('csv/news_settings.json', 'w+')
json.dump(all_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
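# Geocode each saved location with GeoNames, then pull current conditions and
# the daily forecast from the OpenWeatherMap One Call API, writing
# csv/current_weather.json and csv/daily_weather.json.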
def updateWeather(api_key, logf):
    max_cities = 30  # note: currently unused
try:
gn = geocoders.GeoNames(username='fintic')
f = open('csv/daily_weather.json', 'r')
all_daily_settings = json.load(f)
f.close()
f = open('csv/current_weather.json', 'r')
all_current_settings = json.load(f)
f.close()
current_locations = list(all_current_settings['locations'].keys())
daily_locations = list(all_daily_settings['locations'].keys())
all_locations = list(set(current_locations + daily_locations))
current_weathers = {}
daily_weathers = {}
for location in all_locations:
loc = gn.geocode(location)
current_weather = {}
lat = loc.latitude
lon = loc.longitude
url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
r = requests.get(url)
weather = r.json()['current']
current_weather['main_weather'] = weather['weather'][0]['main']
current_weather['description'] = weather['weather'][0]['description']
current_weather['temp'] = weather['temp']
current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
current_weather['feels_like'] = weather['feels_like']
current_weather['humidity'] = weather['humidity']
current_weather['clouds'] = weather['clouds']
current_weather['wind_speed'] = weather['wind_speed']
current_weather['wind_direction'] = weather['wind_deg']
current_weather['visibility'] = weather['visibility']
current_weather['uv'] = weather['uvi']
current_weather['rain_chance'] = r.json()['hourly'][0]['pop']
if location in current_locations:
current_weathers[location] = current_weather
daily_weather = []
daily = r.json()['daily']
for day in daily:
dct = {}
dct['main_weather'] = day['weather'][0]['main']
dct['description'] = day['weather'][0]['description']
dct['min_temp'] = day['temp']['min']
dct['max_temp'] = day['temp']['max']
daily_weather.append(dct)
            #add relevant current information to first day in daily
daily_weather[0]['temp'] = weather['temp']
daily_weather[0]['rain_chance'] = current_weather['rain_chance']
daily_weather[0]['humidity'] = current_weather['humidity']
daily_weather[0]['wind_speed'] = current_weather['wind_speed']
daily_weather[0]['uv'] = current_weather['uv']
daily_weather[0]['clouds'] = current_weather['clouds']
daily_weather[0]['wind_direction'] = current_weather['wind_direction']
daily_weather[0]['visibility'] = current_weather['visibility']
if location in daily_locations:
daily_weathers[location] = daily_weather
all_current_settings['locations'] = current_weathers
all_daily_settings['locations'] = daily_weathers
f = open( "csv/current_weather.json", 'w+' )
json.dump( all_current_settings, f)
f.close()
f = open( "csv/daily_weather.json", 'w+' )
json.dump( all_daily_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
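# Fetch league standings. Golf tours (pga/lpga) return ranked players whose
# photos and country flags are cached locally; team sports return
# win/loss/draw/rank rows.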
def updateLeagueTables(api_key, logf):
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?stats='
try:
f = open('csv/league_tables.json', 'r')
all_settings = json.load(f)
f.close()
leagues = all_settings['leagues'].keys()
leagues_info = {}
for league in leagues:
if league == 'PREMIERLEAGUE':
url += 'PREMIERLEAGUE,'
else:
url += league + ','
url = url[:-1] # remove last comma
url += '&apiKey=' + api_key
r = requests.get(url)
all_data = r.json()
for i,l in enumerate(all_data):
league = list(l.keys())[0]
teams = []
if league == 'pga' or league == 'lpga':
logo_files = []
for d in all_data[i][league]:
del d['_id'], d['updated']
teams.append(d)
try:
if d['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(d['country'], 'logos/ufc_countries/' + d['country'].split('/')[-1].split('&')[0])
except:
pass
try:
if league == 'pga':
if d['photo'].split('/')[-1].split('&')[0] not in os.listdir('logos/pga_rank/'):
urllib.request.urlretrieve(d['photo'],'logos/pga_rank/' + d['photo'].split('/')[-1].split('&')[0])
elif league == 'lpga':
if d['photo'].split('/')[-1] not in os.listdir('logos/lpga_rank/'):
urllib.request.urlretrieve(d['photo'],'logos/lpga_rank/' + d['photo'].split('/')[-1])
except:
pass
try:
if league == 'pga':
logo_files.append(d['photo'].split('/')[-1].split('&')[0])
elif league == 'lpga':
logo_files.append(d['photo'].split('/')[-1])
except:
pass
if league == 'pga':
for file in os.listdir('logos/pga_rank/'):
if file not in logo_files:
os.remove('logos/pga_rank/'+ file)
elif league == 'lpga':
for file in os.listdir('logos/lpga_rank/'):
if file not in logo_files:
os.remove('logos/lpga_rank/'+ file)
else:
for d in all_data[i][league]:
team = {}
team['name'] = d['strTeam']
team['wins'] = d['intWin']
team['loss'] = d['intLoss']
team['draw'] = d['intDraw']
#team['played'] = d['intPlayed']
team['standing'] = d['intRank']
#team['points'] = d['intPoints']
teams.append(team)
leagues_info[league.upper()] = teams
all_settings['leagues'] = leagues_info
f = open( "csv/league_tables.json".format(league), 'w+' )
json.dump(all_settings, f)
f.close()
except:
pass
        # exception logging disabled; see the commented example in updateStocks
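# Restart the live Premier League score scraper if PL is among the configured
# live leagues.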
def updatePLtime():
    try:
        with open('csv/live_games.json') as f:
            all_settings = json.load(f)
        for league in all_settings['leagues']:
            if league == 'PREMIERLEAGUE':
                subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
                premierleague = subprocess.Popen(["python3", "live_pl.py"], shell=False)
    except:
        pass
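# Update sports events for one time frame. 'livescore' (re)spawns a dedicated
# live_<league>.py scraper per league; 'past' and 'upcoming' pull event lists
# from the API, caching UFC fighter photos and country flags on the way.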
def updateLeagueEvents(api_key, time, logf):
url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?{}='.format(time)
if time == 'past':
f = open('csv/past_games.json')
elif time == 'upcoming':
f = open('csv/upcoming_games.json')
elif time == 'livescore':
f = open('csv/live_games.json')
try:
all_settings = json.load(f)
f.close()
if time == 'livescore':
try:
leagues_info = {}
for league in all_settings['leagues']:
events = []
if league == 'NFL':
subprocess.run(["sudo", "pkill", "-f", "live_nfl.py"], shell=False)
nfl = subprocess.Popen(["python3", "live_nfl.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
if league == 'NBA':
subprocess.run(["sudo", "pkill", "-f", "live_nba.py"], shell=False)
nba = subprocess.Popen(["python3", "live_nba.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
if league == 'NHL':
subprocess.run(["sudo", "pkill", "-f", "live_nhl.py"], shell=False)
nhl = subprocess.Popen(["python3", "live_nhl.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
if league == 'MLB':
subprocess.run(["sudo", "pkill", "-f", "live_mlb.py"], shell=False)
mlb = subprocess.Popen(["python3", "live_mlb.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
if league == 'PREMIERLEAGUE':
subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
premierleague = subprocess.Popen(["python3", "live_pl.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
if league == 'MLS':
subprocess.run(["sudo", "pkill", "-f", "live_mls.py"], shell=False)
mls = subprocess.Popen(["python3", "live_mls.py"], shell=False)
events.append('Filled')
leagues_info[league.upper()] = events
all_settings['leagues'] = leagues_info
f = open( "csv/live_games.json", 'w+' )
json.dump(all_settings, f)
f.close()
except:
pass
else:
leagues = all_settings['leagues'].keys()
leagues_info = {}
for league in leagues:
if league == 'PREMIERLEAGUE':
url += 'PREMIERLEAGUE,'
else:
url += league + ','
url = url[:-1] # remove last comma
url += '&apiKey=' + api_key
r = requests.get(url)
all_data = r.json()
for league in all_data.keys():
                events = []
                # golf and motorsport feeds return long event lists; keep only the first three
                if league in ('PGA', 'LPGA', 'PGA_EU', 'LIV', 'F1', 'NASCAR'):
                    ten_or_fifteen = slice(3)
                else:
                    ten_or_fifteen = slice(None)
if league == 'UFC':
event = all_data['UFC'][0]
events.append(event)
if time == 'upcoming':
try:
logo_files = []
for each in all_data['UFC'][0]['fights']:
try:
if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
urllib.request.urlretrieve(each['fighter1pic'],'logos/ufc/' + each['fighter1pic'].split('/')[-1].split('&')[0])
except:
pass
try:
if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
urllib.request.urlretrieve(each['fighter2pic'],'logos/ufc/' + each['fighter2pic'].split('/')[-1].split('&')[0])
except:
pass
try:
logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
except:
pass
try:
logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
except:
pass
#country flags
try:
if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
except:
pass
try:
if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
except:
pass
for file in os.listdir('logos/ufc/'):
if file not in logo_files:
os.remove('logos/ufc/'+ file)
except:
pass
elif time == 'past':
try:
logo_files = []
for each in all_data['UFC'][0]['fights']:
try:
if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
urllib.request.urlretrieve(each['fighter1pic'],'logos/ufc_past/' + each['fighter1pic'].split('/')[-1].split('&')[0])
except:
pass
try:
if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
urllib.request.urlretrieve(each['fighter2pic'],'logos/ufc_past/' + each['fighter2pic'].split('/')[-1].split('&')[0])
except:
pass
try:
logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
except:
pass
try:
logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
except:
pass
#country flags
try:
if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
except:
pass
try:
if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
except:
pass
for file in os.listdir('logos/ufc_past/'):
if file not in logo_files:
os.remove('logos/ufc_past/'+ file)
except:
pass
else:
for d in all_data[league][ten_or_fifteen]:
event = {}
event['date'] = d['dateEvent']
try:
event['date2'] = d['dateEvent2']
except:
pass
                        if time == 'live':  # unreachable here: this branch only handles 'past' and 'upcoming'
event['progess'] = d['strProgress']
event['status'] = d['strStatus']
else:
                            if league in ('PGA', 'LPGA', 'PGA_EU', 'LIV', 'F1', 'NASCAR'):
event['date'] = d['dateEvent']
try:
event['date2'] = d['dateEvent2']
except:
pass
try:
event['total_yards'] = d['total_yards']
event['shots_par'] = d['shots_par']
event['purse'] = d['purse']
except:
pass
event['event'] = d['strEvent'].replace("\u2019","'")
event['venue'] = d['strVenue'].replace("\u2019","'")
event['city'] = d['strCity'].replace("\u2019","'")
event['country'] = d['strCountry']
event['season'] = d['strSeason']
else:
event['round'] = d['intRound']
event['time'] = d['strTime']
event['home_team'] = d['strHomeTeam']
event['away_team'] = d['strAwayTeam']
if time != 'upcoming':
if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
# event['golf_standings'] = d['strResult']
event['golf_rankings'] = d['player_results']
for player in event['golf_rankings']:
try:
if player['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(player['country'], 'logos/ufc_countries/' + player['country'].split('/')[-1].split('&')[0])
except:
pass
# rank = ['n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10', 'T1', 'T2', 'T3', 'T4', 'T5',
# 'T6', 'T7', 'T8', 'T9', 'T10']
# def convert(string):
# string = repr(string).replace('/', '')
# li = list(string.split('\\'))
# return li
# str3 = convert(event['golf_standings'])
# players = []
# for each in str3:
# each = each.replace('nT', 'T', 1)
# if each[:2] in rank:
# try:
# first_space = each.find(' ', 1)
# second_space = each.find(' ', 4)
# first_name = each[first_space:second_space].lstrip()
# initial = first_name[0] + '.'
# each = each.replace(first_name,initial)
# except:
# pass
# interator = each.find('-')
# if interator < 0:
# interator = 0
# interator2 = each[interator:interator + 3]
# result = each.split(interator2, 1)[0] + interator2
# players.append(result.rstrip())
# event['golf_standings'] = players
elif (league == 'LIV'):
# event['golf_standings'] = d['strResult']
event['golf_rankings'] = d['player_results']
for player in event['golf_rankings']:
try:
if player['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
urllib.request.urlretrieve(player['country'], 'logos/ufc_countries/' + player['country'].split('/')[-1].split('&')[0])
except:
pass
# rank = ['n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10', 'T1', 'T2', 'T3', 'T4', 'T5',
# 'T6', 'T7', 'T8', 'T9', 'T10']
# def convert(string):
# string = repr(string).replace('/', '')
# li = list(string.split('\\'))
# return li
# try:
# str3 = convert(event['golf_standings'].split('--------------------------------------')[0])
# strTeams = convert(event['golf_standings'].split('--------------------------------------')[1])
# except:
# pass
# players = []
# teams = []
# try:
# for each in str3:
# each = each.replace('nT', 'T', 1)
# if each[:2] in rank:
# try:
# first_space = each.find(' ', 1)
# second_space = each.find(' ', 4)
# first_name = each[first_space:second_space].lstrip()
# initial = first_name[0] + '.'
# each = each.replace(first_name,initial)
# except:
# pass
# interator = each.find('-')
# if interator < 0:
# interator = 0
# interator2 = each[interator:interator + 3]
# result = each.split(interator2, 1)[0] + interator2
# players.append(result.rstrip())
# for each in strTeams:
# each = each.replace('nT', 'T', 1)
# if each[:2] in rank:
# each = each.split('GC')
# score = each[1].rfind(' ')
# score2 = each[1][score:score+4]
# each2 = each[0] + score2
# teams.append(each2)
# except:
# pass
# event['golf_standings'] = [players] + [teams]
else:
event['away_score'] = d['intAwayScore']
event['home_score'] = d['intHomeScore']
events.append(event)
leagues_info[league.upper()] = events
all_settings['leagues'] = leagues_info
f = open( "csv/{}_games.json".format(time), 'w+' )
json.dump(all_settings, f)
f.close()
except:
pass
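# Convenience wrapper: refresh league tables plus live, past and upcoming events.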
def updateSports(api_key, logf):
    #read user settings to decide which sports to update
updateLeagueTables(api_key, logf)
updateLeagueEvents(api_key,'livescore', logf)
updateLeagueEvents(api_key,'past', logf)
updateLeagueEvents(api_key,'upcoming', logf)
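# Decide whether stocks need a refresh: during NY market hours honour the
# update frequency, otherwise only update if data is missing or stale since the
# previous close. Relies on the module-level NY_zone; not called in the main
# loop below, which inlines the same logic.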
def checkStocks(last_update, update_frequency):
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
f = open('csv/stocks_settings.json', 'r')
all_stocks_settings = json.load(f)
f.close()
stock_info = all_stocks_settings['symbols']
symbols = list(stock_info.keys())
updated = False
diff = (NY_time - last_update).total_seconds()/60 #minutes
if opening < NY_time < closing and datetime.today().weekday() < 5: # we need to do real time updating
if diff >= update_frequency:
updated = True
elif emptyInfo(symbols, stock_info): # if theres any empty stocks
updated = True
else:
# update if last update was before the previous days closing
yday_closing = closing - dt.timedelta(days=1)
yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
if last_update < yday_closing:
updated = True
return updated
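# One-shot refresh of everything; useful for manual runs.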
def updateAll(api_key, weather_key, logf):
updateStocks(api_key, logf)
updateCrypto(api_key, logf)
updateForex(api_key, logf)
updateNews(api_key, logf)
updateSports(api_key, logf)
if weather_key:
updateWeather(weather_key, logf)
# daily one-shot flags for the live-score triggers below; set while the clock
# is before the trigger time, consumed once it passes
past_espn_time = True
past_pl_time = True
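# Main loop: every ~10 s, read csv/last_updates.json, spawn a worker Process
# for each data type whose refresh interval has elapsed (or whose 'force' flag
# is set), persist the new timestamps, and reap finished workers.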
if __name__ == '__main__':
logf = open("log.txt", "a")
t = time.time()
update_frequencies = {'stocks':2, 'crypto':7, 'forex':60, 'news':120, 'weather': 120, 'sports': 1440, 'commodities': 15, 'indices': 15, 'movies': 1440, 'ipo': 1440, 'prepost': 15} #minutes
NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('EST')  # note: named CET, but this is actually US Eastern (EST, no DST)
NY_time = datetime.now(NY_zone)
CET_time = datetime.now(CET_zone)
NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    CET_str = CET_time.strftime("%d/%m/%Y %H:%M:%S")
#f = open('csv/last_updates.json', 'w+')
#update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
#json.dump(update_times, f)
#f.close()
f = open('api_keys.txt')
api_keys = f.readlines()
api_key = api_keys[0].strip()
try:
weather_key = api_keys[1].strip()
except Exception as e:
weather_key = False
logf = open('log.txt', "a")
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
logf.write(str(e))
logf.write('. file: ' + fname)
logf.write('. line: ' + str(exc_tb.tb_lineno))
logf.write('. type: ' + str(exc_type))
logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
logf.close()
try:
        movie_key = open('movie_api_key.txt').readlines()[0].strip()
except Exception as e:
movie_key = False
try:
        ipo_key = open('ipo_api_key.txt').readlines()[0].strip()
except Exception as e:
ipo_key = False
t = time.time()
update_processes = []
try:
while True:
try:
f = open('csv/last_updates.json', 'r')
last_updates = json.load(f)
f.close()
            except:
                last_updates = {"scheduler": {"force": False},
                                "stocks": {"time": "06/03/2022 04:12:09", "force": True},
                                "crypto": {"time": "06/03/2022 04:10:39", "force": True},
                                "news": {"time": "06/03/2022 04:07:09", "force": True},
                                "weather": {"time": "06/03/2022 04:08:20", "force": True},
                                "forex": {"time": "06/03/2022 03:54:02", "force": True},
                                "sports_l": {"time": "06/03/2022 04:10:09", "force": True},
                                "sports_p": {"time": "06/03/2022 04:10:09", "force": True},
                                "sports_u": {"time": "06/03/2022 04:10:09", "force": True},
                                "sports_t": {"time": "06/03/2022 04:10:09", "force": True},
                                "commodities": {"time": "06/03/2022 04:10:09", "force": True},
                                "indices": {"time": "06/03/2022 04:10:09", "force": True},
                                "movies": {"time": "06/03/2022 04:10:09", "force": True},
                                "ipo": {"time": "06/03/2022 04:10:09", "force": True},
                                "prepost": {"time": "06/03/2022 04:10:09", "force": True}}
try:
if last_updates['scheduler']['force']:
try:
f = open('csv/scheduler.json','r')
schedules = json.load(f)
f.close()
shutdown_schedule_hour = schedules['shutdown']['hour']
shutdown_schedule_minute = schedules['shutdown']['minute']
reboot_schedule_hour = schedules['reboot']['hour']
reboot_schedule_minute = schedules['reboot']['minute']
timezone = schedules['timezone']
shutdown_enabled = schedules['shutdown']['enabled']
reboot_enabled = schedules['reboot']['enabled']
except:
shutdown_schedule_hour = "00"
shutdown_schedule_minute = "00"
reboot_schedule_hour = "00"
reboot_schedule_minute = "00"
timezone = "GMT"
shutdown_enabled = False
reboot_enabled = False
last_updates['scheduler']['force'] = False
except:
pass
#SHUTDOWN
try:
if datetime.now(pytz.timezone(timezone)).strftime("%H:%M") == shutdown_schedule_hour+':'+shutdown_schedule_minute and shutdown_enabled:
os.system('sudo shutdown now')
except:
pass
#REBOOT
try:
if datetime.now(pytz.timezone(timezone)).strftime("%H:%M") == reboot_schedule_hour+':'+reboot_schedule_minute and reboot_enabled:
os.system('sudo reboot')
except:
pass
            #msg = getInput()
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
#stocks
stock_time = datetime.strptime(last_updates['stocks']['time'], "%d/%m/%Y %H:%M:%S")
stock_frequency = update_frequencies['stocks']
diff = (NY_time - stock_time).total_seconds()/60 #minutes
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
stock_open = opening < NY_time < closing and datetime.today().weekday() <= 4
if last_updates['stocks']['force'] or (diff >= update_frequencies['stocks'] and stock_open):# or msg == 's':
stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['stocks']['time'] = stock_time
last_updates['stocks']['force'] = False
#updateStocks(api_key)
update_process = Process(target = updateStocks, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
NY_time1 = datetime.now(NY_zone).replace(tzinfo=None)
NY_time2 = datetime.now(NY_zone).replace(tzinfo=None)
#prepost
preopen = NY_time1.replace(hour=4, minute=0, second=0, microsecond=0).replace(tzinfo=None)
preclose = NY_time1.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
postopen = NY_time2.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)
postclose = NY_time2.replace(hour=20, minute=20, second=0, microsecond=0).replace(tzinfo=None)
prepost_frequency = update_frequencies['prepost']
prepost_time = datetime.strptime(last_updates['prepost']['time'], "%d/%m/%Y %H:%M:%S")
pre_open = preopen < NY_time1 < preclose and NY_time1.weekday() <= 4
post_open = postopen < NY_time2 < postclose and NY_time2.weekday() <= 4
diff1 = (NY_time1 - prepost_time).total_seconds()/60 #minutes
diff2 = (NY_time2 - prepost_time).total_seconds()/60 #minutes
if (last_updates['prepost']['force']) or (diff1 >= update_frequencies['prepost'] and pre_open) or (diff2 >= update_frequencies['prepost'] and post_open):
prepost_time = NY_time1.strftime("%d/%m/%Y %H:%M:%S")
last_updates['prepost']['time'] = prepost_time
last_updates['prepost']['force'] = False
update_process = Process(target = updateStocksPrePost, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
# crypto
crypto_time = datetime.strptime(last_updates['crypto']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - crypto_time).total_seconds()/60 #minutes
if last_updates['crypto']['force'] or diff >= update_frequencies['crypto']:# or msg == 'c':
crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateCrypto(api_key, logf)
last_updates['crypto']['time'] = crypto_time
last_updates['crypto']['force'] = False
update_process = Process(target = updateCrypto, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
# commodities
commodities_time = datetime.strptime(last_updates['commodities']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - commodities_time).total_seconds()/60 #minutes
if last_updates['commodities']['force'] or diff >= update_frequencies['commodities']:# or msg == 'c':
commodities_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateCrypto(api_key, logf)
last_updates['commodities']['time'] = commodities_time
last_updates['commodities']['force'] = False
update_process = Process(target = updateCommodities, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
# indices
indices_time = datetime.strptime(last_updates['indices']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - indices_time).total_seconds()/60 #minutes
if last_updates['indices']['force'] or diff >= update_frequencies['indices']:# or msg == 'c':
indices_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['indices']['time'] = indices_time
last_updates['indices']['force'] = False
update_process = Process(target = updateIndices, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
# movies
movies_time = datetime.strptime(last_updates['movies']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - movies_time).total_seconds()/60 #minutes
if last_updates['movies']['force'] or diff >= update_frequencies['movies']:
movies_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['movies']['time'] = movies_time
last_updates['movies']['force'] = False
update_process = Process(target = updateMovies, args = (movie_key,logf))
update_process.start()
update_processes.append(update_process)
# ipos
ipo_time = datetime.strptime(last_updates['ipo']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - ipo_time).total_seconds()/60 #minutes
if last_updates['ipo']['force'] or diff >= update_frequencies['ipo']:
ipo_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['ipo']['time'] = ipo_time
last_updates['ipo']['force'] = False
update_process = Process(target = updateIpo, args = (ipo_key,logf))
update_process.start()
update_processes.append(update_process)
# weather
weather_time = datetime.strptime(last_updates['weather']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - weather_time).total_seconds()/60 #minutes
if last_updates['weather']['force'] or diff >= update_frequencies['weather']:# or msg == 'w':
weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateWeather(weather_key)
last_updates['weather']['time'] = weather_time
last_updates['weather']['force'] = False
update_process = Process(target = updateWeather, args = (weather_key,logf))
update_process.start()
update_processes.append(update_process)
# news
news_time = datetime.strptime(last_updates['news']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - news_time).total_seconds()/60 #minutes
if last_updates['news']['force'] or diff >= update_frequencies['news']:# or msg == 'n':
news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateNews(api_key)
last_updates['news']['time'] = news_time
last_updates['news']['force'] = False
update_process = Process(target = updateNews, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
# sports upcoming
sports_time = datetime.strptime(last_updates['sports_u']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - sports_time).total_seconds()/60 #minutes
if last_updates['sports_u']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateSports(api_key)
last_updates['sports_u']['time'] = sports_time
last_updates['sports_u']['force'] = False
update_process = Process(target = updateLeagueEvents, args = (api_key,'upcoming',logf))
update_process.start()
update_processes.append(update_process)
# sports live
sports_time = datetime.strptime(last_updates['sports_l']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
espn_time = "17:00Z"
espn_time_est = "12:00"
# if datetime.now(pytz.utc).strftime("%H:%MZ") < espn_time:
if datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M") < espn_time_est:
past_espn_time = True
if last_updates['sports_l']['force'] or (datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M") >= espn_time_est and past_espn_time):# or msg == 'S':
# if last_updates['sports_l']['force'] or (datetime.now(pytz.utc).strftime("%H:%MZ") >= espn_time and past_espn_time):# or msg == 'S':
sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['sports_l']['time'] = sports_time
last_updates['sports_l']['force'] = False
past_espn_time = False
update_process = Process(target = updateLeagueEvents, args = (api_key, 'livescore',logf))
update_process.start()
update_processes.append(update_process)
#sports live (premier league)
pl_time = "12:00Z"
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
sports_time = datetime.strptime(last_updates['sports_l']['time'], "%d/%m/%Y %H:%M:%S")
if datetime.now(pytz.utc).strftime("%H:%MZ") < pl_time:
past_pl_time = True
if datetime.now(pytz.utc).strftime("%H:%MZ") >= pl_time and past_pl_time:# or msg == 'S':
sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
past_pl_time = False
last_updates['sports_l']['time'] = sports_time
last_updates['sports_l']['force'] = False
update_process = Process(target = updatePLtime)
update_process.start()
update_processes.append(update_process)
# sports past
sports_time = datetime.strptime(last_updates['sports_p']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - sports_time).total_seconds()/60 #minutes
if last_updates['sports_p']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateSports(api_key)
last_updates['sports_p']['time'] = sports_time
last_updates['sports_p']['force'] = False
update_process = Process(target = updateLeagueEvents, args = (api_key,'past',logf))
update_process.start()
update_processes.append(update_process)
# sports table
sports_time = datetime.strptime(last_updates['sports_t']['time'], "%d/%m/%Y %H:%M:%S")
NY_time = datetime.now(NY_zone).replace(tzinfo=None)
diff = (NY_time - sports_time).total_seconds()/60 #minutes
if last_updates['sports_t']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
#updateSports(api_key)
last_updates['sports_t']['time'] = sports_time
last_updates['sports_t']['force'] = False
update_process = Process(target = updateLeagueTables, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
#forex updates once every 24hours at 1700 CET
# update if last update was before the previous days closing
forex_time = datetime.strptime(last_updates['forex']['time'], "%d/%m/%Y %H:%M:%S")
CET_time = datetime.now(CET_zone).replace(tzinfo=None)
yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
diff = (CET_time.replace(tzinfo=None) - forex_time).total_seconds()/60
opening = CET_time.replace(hour=17, minute=0, second=0, microsecond=0).replace(tzinfo=None)
#forex updates between 5pm sunday and 5pm friday every hour
forex_open = datetime.today().weekday() < 4 or (datetime.today().weekday() == 6 and CET_time > opening) or (datetime.today().weekday() == 4 and CET_time < opening)
if last_updates['forex']['force'] or (diff >= update_frequencies['forex'] and forex_open):# or msg == 'f':
forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
last_updates['forex']['time'] = forex_time
last_updates['forex']['force'] = False
#updateForex(api_key)
update_process = Process(target = updateForex, args = (api_key,logf))
update_process.start()
update_processes.append(update_process)
f = open('csv/last_updates.json', 'w+')
json.dump(last_updates, f)
f.close()
            for process in update_processes[:]:  # iterate over a copy: removing while iterating skips entries
                if not process.is_alive():
                    process.join()
                    process.terminate()
                    update_processes.remove(process)
time.sleep(10)
except:
pass
        # exception logging disabled; see the commented example in updateStocks