# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of the Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code cannot be copied and/or distributed without the express
# permission of Fintic

import psutil
import finnhub
import time
import csv
import pytz
from datetime import datetime, timedelta
import subprocess
import json
import urllib.request
import datetime as dt
import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders
from multiprocessing import Process
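
# Orientation note (inferred from the code below): this script is the data-refresh
# daemon. The main loop reads csv/last_updates.json and, for each feed (stocks,
# crypto, commodities, indices, movies, weather, news, sports, forex), spawns an
# updater process whenever the feed's 'force' flag is set or its update interval
# has elapsed. Each updater rewrites its csv/*.json settings file in place.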

# On startup, wait (presumably for the network and other services to come up),
# then force an immediate refresh of the stock and live-sports feeds.
try:
    time.sleep(80)
    f = open('csv/last_updates.json', 'r')
    last_updates = json.load(f)
    f.close()
    last_updates['stocks']['force'] = True
    last_updates['sports_l']['force'] = True
    f = open('csv/last_updates.json', 'w')
    json.dump(last_updates, f)
    f.close()
except Exception:
    pass

def getInput(Block=False):
    # Read one character from stdin; non-blocking unless Block=True.
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        msg = sys.stdin.read(1)
        #sys.stdin.flush()
    else:
        msg = ''
    return msg

def emptyInfo(symbols, stock_info):
    # Return True if any tracked symbol still holds the placeholder -1 (no data yet).
    update = False
    for symbol in symbols:
        if stock_info[symbol] == -1:  # stock with no info
            update = True
    return update

def updateUpdate(NY_time):
    # Stamp csv/last_update.csv with the given New York time.
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    f = open('csv/last_update.csv', 'w+')
    f.write(NY_str + '\n')
    f.close()

def updateStocks(api_key, logf):
    # Refresh csv/stocks_settings.json with current prices from the ScriptsAPI stocks endpoint.
    try:
        max_stocks = 200
        f = open('csv/stocks_settings.json', 'r')
        all_stocks_settings = json.load(f)
        f.close()
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key

        response = requests.get(url)
        data = response.json()

        if len(data) > 0:
            for symbol in symbols:
                for stock in data:
                    if stock['symbol'] == symbol:
                        stock_info[stock['symbol']] = {'current': stock['price'], 'change': stock['change_since'], 'percent_change': stock['percent']}

            all_stocks_settings['symbols'] = stock_info

        f = open('csv/stocks_settings.json', 'w+')
        json.dump(all_stocks_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

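# Illustrative shape of csv/stocks_settings.json as read/written above
# (symbol and numbers hypothetical):
# {"symbols": {"AAPL": {"current": 150.1, "change": -1.2, "percent_change": -0.79}}}
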
def updateCommodities(api_key, logf):
    # Refresh csv/commodities_settings.json from the ScriptsAPI commodities endpoint.
    try:
        f = open('csv/commodities_settings.json', 'r')
        all_commodities_settings = json.load(f)
        f.close()
        commodity_info = all_commodities_settings['symbols']
        symbols = list(commodity_info.keys())

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/commodities?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
        commodity_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for commodity in data:
                    if commodity['symbol'] == symbol:
                        commodity_info[commodity['symbol']] = {'current': commodity['price'], 'unit': commodity['unit'], '24hr_change': commodity['price_over_24hr'], 'percent_change': commodity['percent_over_24hr']}
        all_commodities_settings['symbols'] = commodity_info
        f = open('csv/commodities_settings.json', 'w+')
        json.dump(all_commodities_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateMovies(api_key, logf):
    # Refresh csv/movie_settings.json from the TMDB trending endpoints and cache backdrops.
    f = open('csv/movie_settings.json', 'r')
    all_settings = json.load(f)
    f.close()

    if all_settings['category'] == 'Popular Movies':
        url = 'https://api.themoviedb.org/3/trending/movie/day?'
        movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
        movieGenre_response = requests.get(movieGenre_url)
        movie_genres = movieGenre_response.json()
    elif all_settings['category'] == 'Popular TV':
        url = 'https://api.themoviedb.org/3/trending/tv/day?'
        tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
        tvGenre_response = requests.get(tvGenre_url)
        tv_genres = tvGenre_response.json()
    elif all_settings['category'] == 'Popular All':
        url = 'https://api.themoviedb.org/3/trending/all/day?'
        movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
        movieGenre_response = requests.get(movieGenre_url)
        movie_genres = movieGenre_response.json()
        tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
        tvGenre_response = requests.get(tvGenre_url)
        tv_genres = tvGenre_response.json()

    url += 'api_key=' + api_key
    response = requests.get(url)
    data = response.json()
    this_out = []
    logo_files = []
    if len(data) > 0:
        movies = data['results']
        for movie in movies:
            movie_language = movie['original_language']
            movie_votes = movie['vote_average']
            movie_votes = "{:.1f}".format(movie_votes)
            try:  # movies carry 'title'/'release_date'; TV shows carry 'name'/'first_air_date'
                movie_titles = movie['title']
                movie_date = movie['release_date']
            except KeyError:
                movie_titles = movie['name']
                movie_date = movie['first_air_date']
            movie_type = movie['media_type']
            movie_genre = movie['genre_ids']
            movie_logo = 'https://image.tmdb.org/t/p/w500' + movie['backdrop_path']
            genrefinal = []
            # map genre ids to names (TMDB calls it 'Science Fiction'; shorten to 'Sci-Fi')
            if all_settings['category'] == 'Popular Movies':
                for i in movie_genre:
                    for genre in movie_genres['genres']:
                        if genre['name'] == 'Science Fiction':
                            genre['name'] = 'Sci-Fi'
                        if i == genre['id']:
                            i = genre['name']
                            genrefinal.append(i)
            elif all_settings['category'] == 'Popular TV':
                for i in movie_genre:
                    for genre in tv_genres['genres']:
                        if i == genre['id']:
                            i = genre['name']
                            genrefinal.append(i)
            elif all_settings['category'] == 'Popular All':
                if movie['media_type'] == 'movie':
                    for i in movie_genre:
                        for genre in movie_genres['genres']:
                            if genre['name'] == 'Science Fiction':
                                genre['name'] = 'Sci-Fi'
                            if i == genre['id']:
                                i = genre['name']
                                genrefinal.append(i)
                elif movie['media_type'] == 'tv':
                    for i in movie_genre:
                        for genre in tv_genres['genres']:
                            if i == genre['id']:
                                i = genre['name']
                                genrefinal.append(i)
            this_out.append({'title': movie_titles,
                             'language': movie_language.upper(),
                             'votes': str(movie_votes),
                             'date': movie_date,
                             'media_type': movie_type.capitalize(),
                             'genre': genrefinal,
                             'backdrop': movie['backdrop_path'][1:],
                             'logo': movie_logo
                             })
            logo_files.append(movie['backdrop_path'][1:])
            urllib.request.urlretrieve(movie_logo, 'logos/movies/' + movie['backdrop_path'][1:])
        # drop cached logos that are no longer in the trending list
        for file in os.listdir('logos/movies/'):
            if file not in logo_files:
                os.remove('logos/movies/' + file)
    all_settings['movies'] = this_out
    f = open('csv/movie_settings.json', 'w+')
    json.dump(all_settings, f)
    f.close()

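# Each entry written to all_settings['movies'] above has this form (values
# hypothetical): {'title': 'Example', 'language': 'EN', 'votes': '7.8',
# 'date': '2022-01-23', 'media_type': 'Movie', 'genre': ['Sci-Fi'],
# 'backdrop': 'abc.jpg', 'logo': 'https://image.tmdb.org/t/p/w500/abc.jpg'}
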
def updateIndices(api_key, logf):
    # Refresh csv/indices_settings.json from the ScriptsAPI indices endpoint.
    try:
        f = open('csv/indices_settings.json', 'r')
        all_indices_settings = json.load(f)
        f.close()
        index_info = all_indices_settings['symbols']
        symbols = list(index_info.keys())
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/indices?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
        index_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for index in data:
                    if index['symbol'] == symbol:
                        index_info[index['symbol']] = {'name': index['name'], 'current': index['price'], 'point_change': index['change'], 'percent_change': index['percent_change']}
        all_indices_settings['symbols'] = index_info
        f = open('csv/indices_settings.json', 'w+')
        json.dump(all_indices_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateCrypto(api_key, logf):
    # Refresh csv/crypto_settings.json. Keys are 'SYMBOL,BASE' pairs, e.g. 'BTC,USD'.
    try:
        f = open('csv/crypto_settings.json', 'r')
        all_crypto_settings = json.load(f)
        f.close()
        coin_info = all_crypto_settings['symbols']
        symbol_base = list(coin_info.keys())
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols='
        for i, s in enumerate(symbols):
            url += bases[i] + '-' + s + ','
        url = url[:-1]  # remove last comma
        url += '&apiKey=' + api_key

        response = requests.get(url)
        data = response.json()

        coin_info = {}
        if len(data) > 0:
            for sb in symbol_base:
                for i, d in enumerate(data):
                    symbol = d['symbol']
                    base = d['currency']
                    if symbol.upper() + ',' + base.upper() == sb:
                        coin_info[symbol.upper() + ',' + base.upper()] = {'current': d['price'], '24hr_change': d['price_over_24hr'], 'percent_change': d['percent_over_24hr']}

            all_crypto_settings['symbols'] = coin_info

        f = open('csv/crypto_settings.json', 'w+')
        json.dump(all_crypto_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateForex(api_key, logf):
    # Refresh csv/forex_settings.json. Keys are 'TARGET,BASE' pairs, e.g. 'EUR,USD'.
    try:
        f = open('csv/forex_settings.json', 'r')
        all_forex_settings = json.load(f)
        f.close()
        forex_info = all_forex_settings['symbols']
        symbol_base = list(forex_info.keys())
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))
        targets = ','.join(symbols)

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?symbols='
        for i, s in enumerate(symbols):
            url += s + '-' + bases[i] + ','
        url = url[:-1]  # remove last comma
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()

        if len(data) > 0:
            c_dict = {}
            for sb in symbol_base:
                for d in data:
                    if d['uid'].replace('/', ',') == sb:
                        c_dict[d['uid'].replace('/', ',')] = {'current': d['rate'], '24hr_change': d['rate_over_24hr'], 'percent_change': d['percent_over_24hr']}

            all_forex_settings['symbols'] = c_dict

        f = open("csv/forex_settings.json", 'w+')
        json.dump(all_forex_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateNews(api_key, logf):
    # Refresh csv/news_settings.json with headlines, filtered by country or category.
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category=technology'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country=GB'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?lang=en'
    try:
        f = open('csv/news_settings.json', 'r')
        all_settings = json.load(f)
        f.close()

        if all_settings['use_country']:
            if all_settings['country'] == 'Worldwide':
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
            else:
                c_dict = {'United States': 'US', 'Australia': 'AU', 'Canada': 'CA', 'Great Britain': 'GB', 'New Zealand': 'NZ', 'Ireland': 'IE', 'Singapore': 'SG', 'South Africa': 'ZA'}
                cc = c_dict[all_settings['country']]
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
        elif all_settings['use_category']:
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category={}'.format(all_settings['category'])

        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()

        if len(data) > 0:
            # load user settings
            max_headlines = int(all_settings['num_headlines'])
            headlines = data[:max_headlines]
            headline_sources = [headline['source'] for headline in headlines]
            headline_titles = [headline['title'] for headline in headlines]
            headline_times = [headline['publishedAt'] for headline in headlines]
            headlines = list(zip(headline_titles, headline_sources, headline_times))

            all_settings['headlines'] = headlines

        f = open('csv/news_settings.json', 'w+')
        json.dump(all_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

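# After a successful run, all_settings['headlines'] holds (title, source,
# publishedAt) triples; json.dump stores them as lists, e.g. (values
# hypothetical): [["Some headline", "BBC News", "2022-01-23T10:24:12Z"], ...]
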
def updateWeather(api_key, logf):
    # Refresh csv/current_weather.json and csv/daily_weather.json via the
    # OpenWeatherMap One Call endpoint, geocoding each saved location with GeoNames.
    max_cities = 30

    try:
        gn = geocoders.GeoNames(username='fintic')
        f = open('csv/daily_weather.json', 'r')
        all_daily_settings = json.load(f)
        f.close()
        f = open('csv/current_weather.json', 'r')
        all_current_settings = json.load(f)
        f.close()
        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())
        all_locations = list(set(current_locations + daily_locations))

        current_weathers = {}
        daily_weathers = {}

        for location in all_locations:
            loc = gn.geocode(location)
            current_weather = {}
            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)
            weather = r.json()['current']
            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
            current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = r.json()['hourly'][0]['pop']

            if location in current_locations:
                current_weathers[location] = current_weather

            daily_weather = []
            daily = r.json()['daily']
            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)
            # add the relevant current information to the first day in daily
            daily_weather[0]['temp'] = weather['temp']
            daily_weather[0]['rain_chance'] = current_weather['rain_chance']
            daily_weather[0]['humidity'] = current_weather['humidity']
            daily_weather[0]['wind_speed'] = current_weather['wind_speed']
            daily_weather[0]['uv'] = current_weather['uv']
            daily_weather[0]['clouds'] = current_weather['clouds']
            daily_weather[0]['wind_direction'] = current_weather['wind_direction']
            daily_weather[0]['visibility'] = current_weather['visibility']

            if location in daily_locations:
                daily_weathers[location] = daily_weather

        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers

        f = open("csv/current_weather.json", 'w+')
        json.dump(all_current_settings, f)
        f.close()
        f = open("csv/daily_weather.json", 'w+')
        json.dump(all_daily_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateLeagueTables(api_key, logf):
    # Refresh csv/league_tables.json with current standings for each configured league.
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?stats='
    try:
        f = open('csv/league_tables.json', 'r')
        all_settings = json.load(f)
        f.close()
        leagues = all_settings['leagues'].keys()
        leagues_info = {}

        for league in leagues:
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
        url = url[:-1]  # remove last comma
        url += '&apiKey=' + api_key
        r = requests.get(url)
        all_data = r.json()

        for i, l in enumerate(all_data):
            league = list(l.keys())[0]
            teams = []
            for d in all_data[i][league]:
                team = {}
                team['name'] = d['strTeam']
                team['wins'] = d['intWin']
                team['loss'] = d['intLoss']
                team['draw'] = d['intDraw']
                #team['played'] = d['intPlayed']
                team['standing'] = d['intRank']
                #team['points'] = d['intPoints']
                teams.append(team)
            leagues_info[league.upper()] = teams

        all_settings['leagues'] = leagues_info

        f = open("csv/league_tables.json", 'w+')
        json.dump(all_settings, f)
        f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updatePLtime():
    # Restart the Premier League live-score helper (live_pl.py) if PL is configured.
    f = open('csv/live_games.json')
    try:
        all_settings = json.load(f)
        f.close()
        try:
            for league in all_settings['leagues']:
                if league == 'PREMIERLEAGUE':
                    subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
                    premierleague = subprocess.Popen(["python3", "live_pl.py"], shell=False)
        except Exception:
            pass
    except Exception:
        pass

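# All live-score helpers (live_nfl.py, live_nba.py, live_nhl.py, live_mlb.py,
# live_pl.py, live_mls.py) are managed the same way here and below: pkill any
# running copy, then relaunch it with python3 via subprocess.Popen.
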
def updateLeagueEvents(api_key, time, logf):
    # Refresh game data for each configured league. 'time' selects the feed:
    # 'past', 'upcoming', or 'livescore' (which just restarts the live helpers).
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?{}='.format(time)

    if time == 'past':
        f = open('csv/past_games.json')
    elif time == 'upcoming':
        f = open('csv/upcoming_games.json')
    elif time == 'livescore':
        f = open('csv/live_games.json')

    try:
        all_settings = json.load(f)
        f.close()

        if time == 'livescore':
            try:
                leagues_info = {}
                for league in all_settings['leagues']:
                    events = []
                    if league == 'NFL':
                        subprocess.run(["sudo", "pkill", "-f", "live_nfl.py"], shell=False)
                        nfl = subprocess.Popen(["python3", "live_nfl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'NBA':
                        subprocess.run(["sudo", "pkill", "-f", "live_nba.py"], shell=False)
                        nba = subprocess.Popen(["python3", "live_nba.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'NHL':
                        subprocess.run(["sudo", "pkill", "-f", "live_nhl.py"], shell=False)
                        nhl = subprocess.Popen(["python3", "live_nhl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'MLB':
                        subprocess.run(["sudo", "pkill", "-f", "live_mlb.py"], shell=False)
                        mlb = subprocess.Popen(["python3", "live_mlb.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'PREMIERLEAGUE':
                        subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
                        premierleague = subprocess.Popen(["python3", "live_pl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'MLS':
                        subprocess.run(["sudo", "pkill", "-f", "live_mls.py"], shell=False)
                        mls = subprocess.Popen(["python3", "live_mls.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                all_settings['leagues'] = leagues_info
                f = open("csv/live_games.json", 'w+')
                json.dump(all_settings, f)
                f.close()
            except Exception:
                pass
        else:
            leagues = all_settings['leagues'].keys()
            leagues_info = {}
            for league in leagues:
                if league == 'PREMIERLEAGUE':
                    url += 'PREMIERLEAGUE,'
                else:
                    url += league + ','
            url = url[:-1]  # remove last comma
            url += '&apiKey=' + api_key
            r = requests.get(url)
            all_data = r.json()
            for league in all_data.keys():
                events = []
                if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
                    ten_or_fifteen = slice(5)  # golf feeds: keep only the first five events
                else:
                    ten_or_fifteen = slice(None)
                for d in all_data[league][ten_or_fifteen]:
                    event = {}
                    event['date'] = d['dateEvent']

                    if time == 'live':  # note: callers pass 'livescore', so this branch is effectively unused
                        event['progress'] = d['strProgress']
                        event['status'] = d['strStatus']
                    else:
                        if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
                            event['date'] = d['dateEvent']
                            event['event'] = d['strEvent'].replace("\u2019", "'")
                            event['venue'] = d['strVenue'].replace("\u2019", "'")
                            event['city'] = d['strCity'].replace("\u2019", "'")
                            event['country'] = d['strCountry']
                            event['season'] = d['strSeason']
                        else:
                            event['round'] = d['intRound']
                            event['time'] = d['strTime']
                            event['home_team'] = d['strHomeTeam']
                            event['away_team'] = d['strAwayTeam']
                    if time != 'upcoming':
                        if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
                            # golf: strResult is a text blob of 'rank name score' lines;
                            # shorten first names to initials and keep the leaderboard entries
                            event['golf_standings'] = d['strResult']
                            rank = ['n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10', 'T1', 'T2', 'T3', 'T4', 'T5',
                                    'T6', 'T7', 'T8', 'T9', 'T10']
                            def convert(string):
                                string = repr(string).replace('/', '')
                                li = list(string.split('\\'))
                                return li
                            str3 = convert(event['golf_standings'])
                            players = []
                            for each in str3:
                                each = each.replace('nT', 'T', 1)
                                if each[:2] in rank:
                                    try:
                                        first_space = each.find(' ', 1)
                                        second_space = each.find(' ', 4)
                                        first_name = each[first_space:second_space].lstrip()
                                        initial = first_name[0] + '.'
                                        each = each.replace(first_name, initial)
                                    except Exception:
                                        pass
                                    iterator = each.find('-')
                                    if iterator < 0:
                                        iterator = 0
                                    iterator2 = each[iterator:iterator + 3]
                                    result = each.split(iterator2, 1)[0] + iterator2
                                    players.append(result.rstrip())
                            event['golf_standings'] = players
                        else:
                            event['away_score'] = d['intAwayScore']
                            event['home_score'] = d['intHomeScore']
                    events.append(event)
                leagues_info[league.upper()] = events
            all_settings['leagues'] = leagues_info

            f = open("csv/{}_games.json".format(time), 'w+')
            json.dump(all_settings, f)
            f.close()

    except Exception:
        pass
        #logf = open('log.txt', "a")
        #exc_type, exc_obj, exc_tb = sys.exc_info()
        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        #logf.write(str(e))
        #logf.write('. file: ' + fname)
        #logf.write('. line: ' + str(exc_tb.tb_lineno))
        #logf.write('. type: ' + str(exc_type))
        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        #logf.close()

def updateSports(api_key, logf):
    # Read user settings to decide which sports to update, then refresh
    # the league tables plus live, past and upcoming events.
    updateLeagueTables(api_key, logf)
    updateLeagueEvents(api_key, 'livescore', logf)
    updateLeagueEvents(api_key, 'past', logf)
    updateLeagueEvents(api_key, 'upcoming', logf)

def checkStocks(last_update, update_frequency):
    # Decide whether the stock feed needs a refresh, based on NY market hours.
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)

    f = open('csv/stocks_settings.json', 'r')
    all_stocks_settings = json.load(f)
    f.close()
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())
    updated = False
    diff = (NY_time - last_update).total_seconds() / 60  # minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5:  # market open: real-time updating
        if diff >= update_frequency:
            updated = True

    elif emptyInfo(symbols, stock_info):  # if there are any empty stocks
        updated = True

    else:
        # update if the last update was before the previous day's close
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
        if last_update < yday_closing:
            updated = True

    return updated
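
# Note: checkStocks above (and the getInput/updateUpdate helpers) appear to be
# unused by the main loop below, which re-implements the market-hours check
# inline; they are kept as-is.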

def updateAll(api_key, weather_key, logf):
    # Run every updater once, back to back (the main loop below instead
    # schedules each updater in its own process).
    updateStocks(api_key, logf)
    updateCrypto(api_key, logf)
    updateForex(api_key, logf)
    updateNews(api_key, logf)
    updateSports(api_key, logf)
    if weather_key:
        updateWeather(weather_key, logf)


# Daily one-shot flags for the live-sports refresh (see the main loop).
past_espn_time = True
past_pl_time = True

if __name__ == '__main__':
    logf = open("log.txt", "a")
    t = time.time()

    update_frequencies = {'stocks': 2, 'crypto': 5, 'forex': 60, 'news': 120, 'weather': 120,
                          'sports': 1440, 'commodities': 15, 'indices': 15, 'movies': 1440}  # minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('EST')  # note: named CET_zone but actually pinned to EST
    NY_time = datetime.now(NY_zone)
    CET_time = datetime.now(CET_zone)
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    CET_str = CET_time.strftime("%d/%m/%Y %H:%M:%S")
    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()

    f = open('api_keys.txt')
    api_keys = f.readlines()
    api_key = api_keys[0].strip()

    try:
        weather_key = api_keys[1].strip()
    except Exception as e:
        weather_key = False
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' + str(exc_tb.tb_lineno))
        logf.write('. type: ' + str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()

    try:
        movie_key = open('movie_api_key.txt').readlines()[0]
    except Exception as e:
        movie_key = False
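
    # Key-file layout (inferred from the reads above): api_keys.txt holds the
    # ScriptsAPI key on line 1 and, optionally, an OpenWeatherMap key on line 2;
    # movie_api_key.txt holds a TMDB key on line 1.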

    t = time.time()
    update_processes = []

    # try:
    #     time.sleep(60)
    #     f = open('csv/last_updates.json', 'r')
    #     last_updates = json.load(f)
    #     f.close()
    #     last_updates['stocks']['force'] = True
    #     #last_updates['weather']['force'] = True
    #     f = open('csv/last_updates.json', 'w')
    #     json.dump(last_updates, f)
    #     f.close()
    # except:
    #     pass

    try:
        while True:
            # Load the per-feed last-update timestamps; fall back to defaults
            # (with every 'force' flag set) if the file is missing or unreadable.
            try:
                f = open('csv/last_updates.json', 'r')
                last_updates = json.load(f)
                f.close()
            except Exception:
                last_updates = {"stocks": {"time": "06/03/2022 04:12:09", "force": True}, "crypto": {"time": "06/03/2022 04:10:39", "force": True},
                                "news": {"time": "06/03/2022 04:07:09", "force": True}, "weather": {"time": "06/03/2022 04:08:20", "force": True},
                                "forex": {"time": "06/03/2022 03:54:02", "force": True}, "sports_l": {"time": "06/03/2022 04:10:09", "force": True},
                                "sports_p": {"time": "06/03/2022 04:10:09", "force": True}, "sports_u": {"time": "06/03/2022 04:10:09", "force": True},
                                "sports_t": {"time": "06/03/2022 04:10:09", "force": True}, "commodities": {"time": "06/03/2022 04:10:09", "force": True},
                                "indices": {"time": "06/03/2022 04:10:09", "force": True}, "movies": {"time": "06/03/2022 04:10:09", "force": True}}

            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            #msg = getInput()

            # Each feed below follows the same pattern: if its 'force' flag is set
            # or its update interval has elapsed, stamp the new time, clear the
            # flag, and spawn the updater in its own process.

            # stocks: only refreshed during NY market hours (9:30-16:05, Mon-Fri)
            stock_time = datetime.strptime(last_updates['stocks']['time'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            diff = (NY_time - stock_time).total_seconds() / 60  # minutes
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
            closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
            stock_open = opening < NY_time < closing and datetime.today().weekday() <= 4
            if last_updates['stocks']['force'] or (diff >= update_frequencies['stocks'] and stock_open):  # or msg == 's':
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks']['time'] = stock_time
                last_updates['stocks']['force'] = False
                #updateStocks(api_key)
                update_process = Process(target=updateStocks, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # crypto
            crypto_time = datetime.strptime(last_updates['crypto']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds() / 60  # minutes
            if last_updates['crypto']['force'] or diff >= update_frequencies['crypto']:  # or msg == 'c':
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateCrypto(api_key, logf)
                last_updates['crypto']['time'] = crypto_time
                last_updates['crypto']['force'] = False
                update_process = Process(target=updateCrypto, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # commodities
            commodities_time = datetime.strptime(last_updates['commodities']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - commodities_time).total_seconds() / 60  # minutes
            if last_updates['commodities']['force'] or diff >= update_frequencies['commodities']:
                commodities_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['commodities']['time'] = commodities_time
                last_updates['commodities']['force'] = False
                update_process = Process(target=updateCommodities, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # indices
            indices_time = datetime.strptime(last_updates['indices']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - indices_time).total_seconds() / 60  # minutes
            if last_updates['indices']['force'] or diff >= update_frequencies['indices']:
                indices_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['indices']['time'] = indices_time
                last_updates['indices']['force'] = False
                update_process = Process(target=updateIndices, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # movies
            movies_time = datetime.strptime(last_updates['movies']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - movies_time).total_seconds() / 60  # minutes
            if last_updates['movies']['force'] or diff >= update_frequencies['movies']:
                movies_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['movies']['time'] = movies_time
                last_updates['movies']['force'] = False
                update_process = Process(target=updateMovies, args=(movie_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # weather
            weather_time = datetime.strptime(last_updates['weather']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds() / 60  # minutes
            if last_updates['weather']['force'] or diff >= update_frequencies['weather']:  # or msg == 'w':
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateWeather(weather_key)
                last_updates['weather']['time'] = weather_time
                last_updates['weather']['force'] = False
                update_process = Process(target=updateWeather, args=(weather_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # news
            news_time = datetime.strptime(last_updates['news']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds() / 60  # minutes
            if last_updates['news']['force'] or diff >= update_frequencies['news']:  # or msg == 'n':
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateNews(api_key)
                last_updates['news']['time'] = news_time
                last_updates['news']['force'] = False
                update_process = Process(target=updateNews, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # sports upcoming
            sports_time = datetime.strptime(last_updates['sports_u']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds() / 60  # minutes
            if last_updates['sports_u']['force'] or diff >= update_frequencies['sports']:  # or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_u']['time'] = sports_time
                last_updates['sports_u']['force'] = False
                update_process = Process(target=updateLeagueEvents, args=(api_key, 'upcoming', logf))
                update_process.start()
                update_processes.append(update_process)

            # sports live: restart the live-score helpers once per day, on the
            # first pass after 12:00 New York time
            sports_time = datetime.strptime(last_updates['sports_l']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            espn_time = "17:00Z"
            espn_time_est = "12:00"
            # if datetime.now(pytz.utc).strftime("%H:%MZ") < espn_time:
            if datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M") < espn_time_est:
                past_espn_time = True
            if last_updates['sports_l']['force'] or (datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M") >= espn_time_est and past_espn_time):  # or msg == 'S':
            # if last_updates['sports_l']['force'] or (datetime.now(pytz.utc).strftime("%H:%MZ") >= espn_time and past_espn_time):  # or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['sports_l']['time'] = sports_time
                last_updates['sports_l']['force'] = False
                past_espn_time = False
                update_process = Process(target=updateLeagueEvents, args=(api_key, 'livescore', logf))
                update_process.start()
                update_processes.append(update_process)

            # sports live (Premier League): restart live_pl.py once per day after 12:00 UTC
            pl_time = "12:00Z"
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            sports_time = datetime.strptime(last_updates['sports_l']['time'], "%d/%m/%Y %H:%M:%S")
            if datetime.now(pytz.utc).strftime("%H:%MZ") < pl_time:
                past_pl_time = True
            if datetime.now(pytz.utc).strftime("%H:%MZ") >= pl_time and past_pl_time:  # or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                past_pl_time = False
                last_updates['sports_l']['time'] = sports_time
                last_updates['sports_l']['force'] = False
                update_process = Process(target=updatePLtime)
                update_process.start()
                update_processes.append(update_process)

            # sports past
            sports_time = datetime.strptime(last_updates['sports_p']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds() / 60  # minutes
            if last_updates['sports_p']['force'] or diff >= update_frequencies['sports']:  # or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_p']['time'] = sports_time
                last_updates['sports_p']['force'] = False
                update_process = Process(target=updateLeagueEvents, args=(api_key, 'past', logf))
                update_process.start()
                update_processes.append(update_process)

            # sports tables
            sports_time = datetime.strptime(last_updates['sports_t']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds() / 60  # minutes
            if last_updates['sports_t']['force'] or diff >= update_frequencies['sports']:  # or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_t']['time'] = sports_time
                last_updates['sports_t']['force'] = False
                update_process = Process(target=updateLeagueTables, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            # forex: updates once every 24 hours at 17:00 CET
            # (update if the last update was before the previous day's close)
            forex_time = datetime.strptime(last_updates['forex']['time'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone).replace(tzinfo=None)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
            diff = (CET_time.replace(tzinfo=None) - forex_time).total_seconds() / 60
            opening = CET_time.replace(hour=17, minute=0, second=0, microsecond=0).replace(tzinfo=None)
            # the forex market runs from 17:00 Sunday to 17:00 Friday; update hourly in that window
            forex_open = datetime.today().weekday() < 4 or (datetime.today().weekday() == 6 and CET_time > opening) or (datetime.today().weekday() == 4 and CET_time < opening)
            if last_updates['forex']['force'] or (diff >= update_frequencies['forex'] and forex_open):  # or msg == 'f':
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex']['time'] = forex_time
                last_updates['forex']['force'] = False
                #updateForex(api_key)
                update_process = Process(target=updateForex, args=(api_key, logf))
                update_process.start()
                update_processes.append(update_process)

            f = open('csv/last_updates.json', 'w+')
            json.dump(last_updates, f)
            f.close()

            # reap updater processes that have finished (iterate over a copy,
            # since the list is mutated while looping)
            for process in update_processes[:]:
                if not process.is_alive():
                    process.join()
                    process.terminate()
                    update_processes.remove(process)

            time.sleep(10)

    except Exception:
        pass
        # logf = open('log.txt', "a")
        # exc_type, exc_obj, exc_tb = sys.exc_info()
        # fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        # logf.write(str(e))
        # logf.write('. file: ' + fname)
        # logf.write('. line: ' + str(exc_tb.tb_lineno))
        # logf.write('. type: ' + str(exc_type))
        # logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        # logf.close()