2022-01-23 10:24:12 +00:00
# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code can not be copied and/or distributed without the express
# permission of Fintic
2023-05-24 11:38:14 +00:00
import pickle
2022-01-23 10:24:12 +00:00
import finnhub
import time
import csv
import pytz
from datetime import datetime , timedelta
2023-01-09 10:13:31 +00:00
import subprocess
2022-01-23 10:24:12 +00:00
import json
2023-01-09 10:13:31 +00:00
import urllib . request
2022-01-23 10:24:12 +00:00
import datetime as dt
import sys , os , base64 , hashlib , hmac , select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback
from geopy import geocoders
2022-02-28 20:16:31 +00:00
from multiprocessing import Process
2022-01-23 10:24:12 +00:00
2022-10-06 08:38:12 +00:00
# On startup, force a refresh of the feeds whose cached data may be stale
# after a reboot. Best-effort: if the settings file is missing or corrupt,
# skip silently and let the normal update cycle recover.
try:
    # Give the system (network, sibling services) time to come up after boot.
    time.sleep(80)
    with open('csv/last_updates.json', 'r') as f:
        last_updates = json.load(f)
    last_updates['stocks']['force'] = True
    last_updates['prepost']['force'] = True
    last_updates['sports_l']['force'] = True
    with open('csv/last_updates.json', 'w') as f:
        json.dump(last_updates, f)
except Exception:
    pass
2023-04-21 08:46:43 +00:00
# Load the shutdown/reboot schedule. Falls back to disabled defaults when
# csv/scheduler.json is absent or malformed.
try:
    with open('csv/scheduler.json', 'r') as f:
        schedules = json.load(f)
    shutdown_schedule_hour = schedules['shutdown']['hour']
    shutdown_schedule_minute = schedules['shutdown']['minute']
    reboot_schedule_hour = schedules['reboot']['hour']
    reboot_schedule_minute = schedules['reboot']['minute']
    timezone = schedules['timezone']
    shutdown_enabled = schedules['shutdown']['enabled']
    reboot_enabled = schedules['reboot']['enabled']
except Exception:
    # Defaults: scheduling disabled, midnight GMT placeholders.
    shutdown_schedule_hour = "00"
    shutdown_schedule_minute = "00"
    reboot_schedule_hour = "00"
    reboot_schedule_minute = "00"
    timezone = "GMT"
    shutdown_enabled = False
    reboot_enabled = False
2022-10-06 08:38:12 +00:00
2022-01-23 10:24:12 +00:00
def getInput(Block=False):
    """Read a single character from stdin.

    When *Block* is True, always read (blocking). Otherwise only read when
    select() reports stdin as ready; returns '' when nothing is waiting.
    """
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        return sys.stdin.read(1)
    return ''
def emptyInfo(symbols, stock_info):
    """Return True if any symbol in *symbols* has no fetched data yet.

    A value of -1 in *stock_info* marks a symbol whose info was never
    retrieved, signalling that an update pass is needed. Short-circuits on
    the first such symbol instead of scanning the whole list.
    """
    return any(stock_info[symbol] == -1 for symbol in symbols)
def updateUpdate(NY_time):
    """Record *NY_time* (a datetime) to csv/last_update.csv as 'dd/mm/YYYY HH:MM:SS'."""
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    # 'with' guarantees the handle is closed even if the write fails.
    with open('csv/last_update.csv', 'w+') as f:
        f.write(NY_str + '\n')
2022-03-05 14:03:11 +00:00
def updateStocks(api_key, logf):
    """Refresh csv/stocks_settings.json with current stock prices.

    Reads the configured symbols, queries the ScriptsAPI stocks endpoint, and
    rewrites the settings file with current/change/percent data per symbol.
    Best-effort: any failure (missing file, network error, malformed response)
    leaves the file untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/stocks_settings.json', 'r') as f:
            all_stocks_settings = json.load(f)
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key

        data = requests.get(url).json()

        if len(data) > 0:
            # Overwrite each configured symbol's entry with the fresh quote;
            # symbols absent from the response keep their previous values.
            for symbol in symbols:
                for stock in data:
                    if stock['symbol'] == symbol:
                        stock_info[stock['symbol']] = {'current': stock['price'], 'change': stock['change_since'], 'percent_change': stock['percent']}
            all_stocks_settings['symbols'] = stock_info

        with open('csv/stocks_settings.json', 'w+') as f:
            json.dump(all_stocks_settings, f)
    except Exception:
        pass
2022-06-12 17:19:14 +00:00
2023-03-29 08:46:13 +00:00
2023-05-24 09:57:47 +00:00
def getCookiesnCrumb():
    """Obtain a fresh Yahoo Finance session cookie and API crumb.

    Visits the Yahoo Finance front page to populate session cookies, then
    requests a crumb token. The session object is pickled to session.txt and
    the crumb string written to crumb.txt for reuse by later quote requests.
    """
    ua_headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
    }
    sess = requests.Session()
    sess.get('https://finance.yahoo.com', headers=ua_headers)
    crumb_text = sess.get('https://query1.finance.yahoo.com/v1/test/getcrumb', headers=ua_headers).content.decode('utf-8')
    with open('session.txt', 'wb') as f:
        pickle.dump(sess, f)
    with open('crumb.txt', 'w') as f:
        f.write(crumb_text)
2023-03-29 08:46:13 +00:00
2023-05-24 09:57:47 +00:00
def updateStocksPrePost(api_key, logf):
    """Refresh csv/prepost_settings.json with pre/post-market quotes from Yahoo.

    Reads symbols from csv/stocks_settings.json and queries Yahoo's v6 quote
    endpoint. If Yahoo returns an error payload, retries via the v7 endpoint
    using a cached session cookie + crumb, refreshing them with
    getCookiesnCrumb() until the request succeeds. Pre-market data falls back
    to post-market, then regular-market, figures when fields are missing.
    Best-effort: any failure leaves the output file untouched.

    api_key -- unused here (kept for a uniform updater signature)
    logf    -- legacy log-file handle (unused)
    """
    try:
        with open('csv/stocks_settings.json', 'r') as f:
            all_stocks_settings = json.load(f)
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())

        #KEEP THIS JUST IN CASE V7 GOES DOWN prepost_url = 'https://query2.finance.yahoo.com/v6/finance/quote?symbols='
        prepost_url = 'https://query2.finance.yahoo.com/v6/finance/quote?symbols='
        for symbol in symbols:
            prepost_url += symbol + ','
        # BUG FIX: '&reg' had been mangled into the '®' character by an HTML
        # entity conversion; the intended query parameter is '&region=US'.
        prepost_url += '&fields=regularMarketPreviousClose,regularMarketPrice,preMarketPrice,preMarketChangePercent,regularMarketChangePercent,regularMarketChange,preMarketChange,postMarketPrice,postMarketChange,postMarketChangePercent&region=US&lang=en-US'

        headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
                   'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}

        prepost = requests.get(prepost_url, headers=headers).json()
        if "'error': {'code'" in str(prepost):
            # v6 rejected the request: fall back to v7, which needs a cookie
            # session plus a crumb token.
            while True:
                try:
                    with open('session.txt', 'rb') as f:
                        session = pickle.load(f)
                    with open('crumb.txt', 'r') as f:
                        crumb = f.read()
                except Exception:
                    # No cached credentials yet -- fetch them, then reload.
                    getCookiesnCrumb()
                    with open('session.txt', 'rb') as f:
                        session = pickle.load(f)
                    with open('crumb.txt', 'r') as f:
                        crumb = f.read()
                params = {'crumb': crumb}
                prepost = session.get(prepost_url.replace('v6', 'v7'), headers=headers, params=params).json()
                if "'error': {'code'" not in str(prepost):
                    break
                else:
                    # Crumb rejected: refresh credentials and try again.
                    getCookiesnCrumb()
                    time.sleep(5)

        prepost_data = prepost['quoteResponse']['result']
        time_now = datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M EST")
        if len(prepost_data) > 0:
            for symbol in symbols:
                for stock in prepost_data:
                    if stock['symbol'] == symbol:
                        stock_info[stock['symbol']] = {"time_now": time_now}
                        # Pre-market figures; fall back to post-market price,
                        # then the regular session price, when unavailable.
                        try:
                            stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['preMarketPrice'],
                                                                         'prechange': '%.2f' % stock['preMarketChange'],
                                                                         'prepercent': '%.2f' % stock['preMarketChangePercent']}
                        except (KeyError, TypeError):
                            try:
                                stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['postMarketPrice'],
                                                                             'prechange': '%.2f' % 0,
                                                                             'prepercent': '%.2f' % 0}
                            except (KeyError, TypeError):
                                stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['regularMarketPrice'],
                                                                             'prechange': '%.2f' % 0,
                                                                             'prepercent': '%.2f' % 0}
                        # Post-market figures; fall back to the regular price.
                        try:
                            stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['postMarketPrice'],
                                                                          'postchange': '%.2f' % stock['postMarketChange'],
                                                                          'postpercent': '%.2f' % stock['postMarketChangePercent']}
                        except (KeyError, TypeError):
                            stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['regularMarketPrice'],
                                                                          'postchange': '%.2f' % 0,
                                                                          'postpercent': '%.2f' % 0}
        all_stocks_settings['symbols'] = stock_info
        with open('csv/prepost_settings.json', 'w+') as f:
            json.dump(all_stocks_settings['symbols'], f)
    except Exception:
        pass
2022-06-12 17:19:14 +00:00
def updateCommodities(api_key, logf):
    """Refresh csv/commodities_settings.json with current commodity prices.

    Queries the ScriptsAPI commodities endpoint for the configured symbols and
    rewrites the settings file. Symbols missing from the response are dropped
    (the symbol dict is rebuilt from the response). Best-effort: any failure
    leaves the file untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/commodities_settings.json', 'r') as f:
            all_commodities_settings = json.load(f)
        commodity_info = all_commodities_settings['symbols']
        symbols = list(commodity_info.keys())

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/commodities?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key
        data = requests.get(url).json()

        commodity_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for commodity in data:
                    if commodity['symbol'] == symbol:
                        commodity_info[commodity['symbol']] = {'current': commodity['price'], 'unit': commodity['unit'], '24hr_change': commodity['price_over_24hr'], 'percent_change': commodity['percent_over_24hr']}
        all_commodities_settings['symbols'] = commodity_info
        with open('csv/commodities_settings.json', 'w+') as f:
            json.dump(all_commodities_settings, f)
    except Exception:
        pass
2022-01-23 10:24:12 +00:00
2023-02-23 05:18:00 +00:00
def human_format(num):
    """Format *num* with a metric suffix, e.g. 1500 -> '1.5K', 2.5e9 -> '2.5B'.

    The value is first rounded to 3 significant figures, then scaled down by
    thousands. Magnitude is clamped to the largest suffix ('T') so values at
    or above 10**15 no longer raise IndexError.
    """
    suffixes = ['', 'K', 'M', 'B', 'T']
    num = float('{:.3g}'.format(num))
    magnitude = 0
    while abs(num) >= 1000 and magnitude < len(suffixes) - 1:
        magnitude += 1
        num /= 1000.0
    return '{}{}'.format('{:f}'.format(num).rstrip('0').rstrip('.'), suffixes[magnitude])
2022-10-06 05:00:59 +00:00
2023-02-23 05:18:00 +00:00
2023-01-09 08:23:25 +00:00
def updateMovies(api_key, logf):
    """Refresh csv/movie_settings.json with trending titles from TMDB.

    Depending on the configured category ('Popular Movies', 'Popular TV' or
    'Popular All'), queries the matching TMDB trending endpoint plus the
    movie/TV genre lists, builds a per-title record (title, language, votes,
    date, media type, genres, budget/revenue, backdrop), downloads any missing
    backdrop images into logos/movies/ and prunes ones no longer referenced.

    api_key -- TMDB API key
    logf    -- legacy log-file handle (unused; kept for interface compatibility)

    NOTE(review): there is no try/except here, so network or schema errors
    propagate to the caller, and `url` is unbound if the category is not one
    of the three handled values.
    """
    f = open('csv/movie_settings.json', 'r')
    all_settings = json.load(f)
    f.close()
    # Fetch the trending feed for the chosen category, plus whichever genre
    # id->name tables that category needs to decode 'genre_ids'.
    if all_settings['category'] == 'Popular Movies':
        url = 'https://api.themoviedb.org/3/trending/movie/day?'
        movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
        movieGenre_response = requests.get(movieGenre_url)
        movie_genres = movieGenre_response.json()
    elif all_settings['category'] == 'Popular TV':
        url = 'https://api.themoviedb.org/3/trending/tv/day?'
        tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
        tvGenre_response = requests.get(tvGenre_url)
        tv_genres = tvGenre_response.json()
    elif all_settings['category'] == 'Popular All':
        url = 'https://api.themoviedb.org/3/trending/all/day?'
        movieGenre_url = 'https://api.themoviedb.org/3/genre/movie/list?api_key=' + api_key + '&language=en-US'
        movieGenre_response = requests.get(movieGenre_url)
        movie_genres = movieGenre_response.json()
        tvGenre_url = 'https://api.themoviedb.org/3/genre/tv/list?api_key=' + api_key + '&language=en-US'
        tvGenre_response = requests.get(tvGenre_url)
        tv_genres = tvGenre_response.json()
    url += 'api_key=' + api_key
    response = requests.get(url)
    data = response.json()
    this_out = []    # accumulated per-title records for the settings file
    logo_files = []  # backdrop filenames still referenced after this pass
    if len(data) > 0:
        movies = data['results']
        for movie in movies:
            # Budget/revenue only exist for movies; a details call per movie
            # fetches them. TV entries get placeholder zeros.
            # NOTE(review): assumes every trending entry carries 'media_type'
            # -- confirm for the single-type endpoints.
            if movie['media_type'] == 'movie':
                movie_id = movie['id']
                box_office_url = 'https://api.themoviedb.org/3/movie/' + str(movie_id) + '?api_key=' + api_key
                box_office_response = requests.get(box_office_url)
                box_office_data = box_office_response.json()
                budget = human_format(box_office_data['budget'])
                revenue = human_format(box_office_data['revenue'])
            else:
                budget = '0'
                revenue = '0'
            movie_language = movie['original_language']
            movie_votes = movie['vote_average']
            movie_votes = "{:.1f}".format(movie_votes)
            # Movies use title/release_date; TV uses name/first_air_date.
            try:
                movie_titles = movie['title']
                movie_date = movie['release_date']
            except KeyError:
                movie_titles = movie['name']
                movie_date = movie['first_air_date']
            movie_type = movie['media_type']
            movie_genre = movie['genre_ids']
            movie_logo = 'https://image.tmdb.org/t/p/w500' + movie['backdrop_path']
            # Translate numeric genre ids into names using the table(s)
            # fetched above; 'Science Fiction' is shortened for display.
            genrefinal = []
            if all_settings['category'] == 'Popular Movies':
                for i in movie_genre:
                    for genre in movie_genres['genres']:
                        if genre['name'] == 'Science Fiction':
                            genre['name'] = 'Sci-Fi'
                        if i == genre['id']:
                            i = genre['name']
                            genrefinal.append(i)
            elif all_settings['category'] == 'Popular TV':
                for i in movie_genre:
                    for genre in tv_genres['genres']:
                        if i == genre['id']:
                            i = genre['name']
                            genrefinal.append(i)
            elif all_settings['category'] == 'Popular All':
                for i in movie_genre:
                    if movie['media_type'] == 'movie':
                        for genre in movie_genres['genres']:
                            if genre['name'] == 'Science Fiction':
                                genre['name'] = 'Sci-Fi'
                            if i == genre['id']:
                                i = genre['name']
                                genrefinal.append(i)
                    elif movie['media_type'] == 'tv':
                        for genre in tv_genres['genres']:
                            if i == genre['id']:
                                i = genre['name']
                                genrefinal.append(i)
            this_out.append({'title': movie_titles,
                             'language': movie_language.upper(),
                             'votes': str(movie_votes),
                             'date': movie_date,
                             'media_type': movie_type.capitalize(),
                             'genre': genrefinal,
                             'budget': budget,
                             'revenue': revenue,
                             'backdrop': movie['backdrop_path'][1:],
                             'logo': movie_logo
                             })
            logo_files.append(movie['backdrop_path'][1:])
            # Download the backdrop if we don't have it cached yet.
            # NOTE(review): the destination concatenates 'logos/movies/' with a
            # path that still has its leading '/' (double slash) -- appears to
            # rely on the OS collapsing it; verify.
            if movie['backdrop_path'][1:] not in os.listdir('logos/movies/'):
                urllib.request.urlretrieve(movie_logo, 'logos/movies/' + movie['backdrop_path'])
                time.sleep(0.5)
    # Prune cached backdrops that no current title references.
    for file in os.listdir('logos/movies/'):
        if file not in logo_files:
            os.remove('logos/movies/' + file)
    all_settings['movies'] = this_out
    f = open('csv/movie_settings.json', 'w+')
    json.dump(all_settings, f)
    f.close()
2023-03-08 07:30:17 +00:00
def updateIpo(api_key, logf):
    """Refresh csv/ipo_settings.json with Finnhub's IPO calendar.

    The window runs from the Monday of the current week (UTC) to 21 days
    later. Share counts and total value are humanized via human_format();
    missing fields become 'N/A'. A missing/empty calendar stores ['No Data'].

    api_key -- Finnhub API token
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    day = datetime.now(pytz.utc).strftime("%Y-%m-%d")
    # Renamed from `dt`: that local shadowed the module imported as
    # `import datetime as dt`.
    today = datetime.strptime(day, "%Y-%m-%d")
    start = today - timedelta(days=today.weekday())
    start_date = start.strftime("%Y-%m-%d")
    end = start + timedelta(days=21)
    end_date = end.strftime("%Y-%m-%d")
    ipo_url = 'https://finnhub.io/api/v1/calendar/ipo?from=' + start_date + '&to=' + end_date + '&token=' + api_key
    with open('csv/ipo_settings.json', 'r') as f:
        ipo_settings = json.load(f)
    all_ipo = requests.get(ipo_url).json()
    ipo_list = []
    try:
        if len(all_ipo['ipoCalendar']) > 0:
            for ipo in all_ipo['ipoCalendar']:
                try:
                    shares = human_format(ipo['numberOfShares'])
                except Exception:
                    shares = 'N/A'
                try:
                    sharesvalue = human_format(ipo['totalSharesValue'])
                except Exception:
                    sharesvalue = 'N/A'
                ipo_list.append({
                    'date': ipo['date'],
                    'name': ipo['name'],
                    'shares': shares,
                    'price': ipo['price'],
                    'status': ipo['status'],
                    'symbol': ipo['symbol'],
                    'sharesvalue': sharesvalue
                })
        else:
            ipo_list = ['No Data']
    except Exception:
        ipo_list = ['No Data']

    ipo_settings['symbols'] = ipo_list
    with open('csv/ipo_settings.json', 'w+') as f:
        json.dump(ipo_settings, f)
2023-01-09 08:23:25 +00:00
2022-10-06 05:00:59 +00:00
def updateIndices(api_key, logf):
    """Refresh csv/indices_settings.json with current index values.

    Queries the ScriptsAPI indices endpoint for the configured symbols and
    rewrites the settings file (name/current/point_change/percent_change per
    index). The symbol dict is rebuilt from the response, so symbols missing
    from it are dropped. Best-effort: any failure leaves the file untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/indices_settings.json', 'r') as f:
            all_indices_settings = json.load(f)
        index_info = all_indices_settings['symbols']
        symbols = list(index_info.keys())

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/indices?symbols='
        for symbol in symbols:
            url += symbol + ','
        url += '&apiKey=' + api_key
        data = requests.get(url).json()

        index_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for index in data:
                    if index['symbol'] == symbol:
                        index_info[index['symbol']] = {'name': index['name'], 'current': index['price'], 'point_change': index['change'], 'percent_change': index['percent_change']}
        all_indices_settings['symbols'] = index_info
        with open('csv/indices_settings.json', 'w+') as f:
            json.dump(all_indices_settings, f)
    except Exception:
        pass
2022-10-06 05:00:59 +00:00
2022-03-05 14:03:11 +00:00
def updateCrypto(api_key, logf):
    """Refresh csv/crypto_settings.json with current coin prices.

    Settings keys are 'SYMBOL,BASE' pairs (e.g. 'BTC,USD'). Queries the
    ScriptsAPI crypto endpoint with 'BASE-SYMBOL' pairs and rebuilds the
    symbol dict from the response (pairs missing from it are dropped).
    Best-effort: any failure leaves the file untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/crypto_settings.json', 'r') as f:
            all_crypto_settings = json.load(f)
        coin_info = all_crypto_settings['symbols']
        symbol_base = list(coin_info.keys())
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols='
        for i, s in enumerate(symbols):
            url += bases[i] + '-' + s + ','
        url = url[:-1]  # remove trailing comma
        url += '&apiKey=' + api_key

        data = requests.get(url).json()

        coin_info = {}
        if len(data) > 0:
            for sb in symbol_base:
                for d in data:
                    symbol = d['symbol']
                    base = d['currency']
                    if symbol.upper() + ',' + base.upper() == sb:
                        coin_info[symbol.upper() + ',' + base.upper()] = {'current': d['price'], '24hr_change': d['price_over_24hr'], 'percent_change': d['percent_over_24hr']}
        all_crypto_settings['symbols'] = coin_info
        with open('csv/crypto_settings.json', 'w+') as f:
            json.dump(all_crypto_settings, f)
    except Exception:
        pass
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
def updateForex(api_key, logf):
    """Refresh csv/forex_settings.json with current exchange rates.

    Settings keys are 'TARGET,BASE' pairs. Queries the ScriptsAPI forex
    endpoint with 'TARGET-BASE' pairs and rebuilds the symbol dict from the
    response (matching on the API's 'uid' field, e.g. 'EUR/USD').
    Best-effort: any failure leaves the file untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/forex_settings.json', 'r') as f:
            all_forex_settings = json.load(f)
        forex_info = all_forex_settings['symbols']
        symbol_base = list(forex_info.keys())
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]

        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?symbols='
        for i, s in enumerate(symbols):
            url += s + '-' + bases[i] + ','
        url = url[:-1]  # remove trailing comma
        url += '&apiKey=' + api_key

        data = requests.get(url).json()

        if len(data) > 0:
            c_dict = {}
            for sb in symbol_base:
                for d in data:
                    if d['uid'].replace('/', ',') == sb:
                        c_dict[d['uid'].replace('/', ',')] = {'current': d['rate'], '24hr_change': d['rate_over_24hr'], 'percent_change': d['percent_over_24hr']}
            all_forex_settings['symbols'] = c_dict
        with open("csv/forex_settings.json", 'w+') as f:
            json.dump(all_forex_settings, f)
    except Exception:
        pass
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
def updateNews(api_key, logf):
    """Refresh csv/news_settings.json with current headlines.

    Chooses the endpoint from the settings: worldwide feed, a per-country
    feed (country name mapped to its ISO code), or a category feed. Stores up
    to num_headlines (title, source, publishedAt) tuples under 'headlines'.
    Best-effort: any failure (including neither use_country nor use_category
    being set, which leaves `url` unbound) is swallowed and the file is left
    untouched.

    api_key -- key appended to the API request
    logf    -- legacy log-file handle (unused; kept for interface compatibility)
    """
    try:
        with open('csv/news_settings.json', 'r') as f:
            all_settings = json.load(f)

        if all_settings['use_country']:
            if all_settings['country'] == 'Worldwide':
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
            else:
                c_dict = {'United States': 'US', 'Australia': 'AU', 'Canada': 'CA', 'Great Britain': 'GB', 'New Zealand': 'NZ', 'Ireland': 'IE', 'Singapore': 'SG', 'South Africa': 'ZA', 'Germany': 'DE', 'Hong Kong': 'HK', 'Japan': 'JP', 'South Korea': 'KR', 'China': 'CN', 'France': 'FR', 'India': 'IN', 'Italy': 'IT', 'Switzerland': 'CH', 'Netherlands': 'NL', 'Spain': 'ES', 'Brazil': 'BR', 'Portugal': 'PT'}
                cc = c_dict[all_settings['country']]
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
        elif all_settings['use_category']:
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category={}'.format(all_settings['category'])
        url += '&apiKey=' + api_key
        data = requests.get(url).json()

        if len(data) > 0:
            max_headlines = int(all_settings['num_headlines'])
            headlines = data[:max_headlines]
            headline_sources = [headline['source'] for headline in headlines]
            headline_titles = [headline['title'] for headline in headlines]
            headline_times = [headline['publishedAt'] for headline in headlines]
            all_settings['headlines'] = list(zip(headline_titles, headline_sources, headline_times))

        with open('csv/news_settings.json', 'w+') as f:
            json.dump(all_settings, f)
    except Exception:
        pass
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
def updateWeather(api_key, logf):
    """Refresh csv/current_weather.json and csv/daily_weather.json.

    Geocodes every configured location via GeoNames, queries the Open-Meteo
    forecast API, and stores the current-hour conditions plus a 7-day
    forecast. Best-effort: any failure leaves both files untouched.

    api_key -- unused here (Open-Meteo needs no key; kept for the uniform
               updater signature)
    logf    -- legacy log-file handle (unused)
    """
    try:
        gn = geocoders.GeoNames(username='fintic')

        # Open-Meteo WMO weather code -> [main_weather, description].
        weather_codes = {
            0: ['Clear', 'Clear sky'],
            1: ['Clouds', 'few clouds'], 2: ['Clouds', 'scattered clouds'], 3: ['Clouds', 'overcast clouds'],
            45: ['Fog', 'Fog'], 48: ['Fog', 'depositing rime fog'],
            51: ['Drizzle', 'Light'], 53: ['Drizzle', 'moderate'], 55: ['Drizzle', 'dense'], 56: ['Drizzle', 'light'], 57: ['Drizzle', 'dense'],
            61: ['Rain', 'light rain'], 63: ['Rain', 'moderate rain'], 65: ['Rain', 'very heavy rain'],
            66: ['Rain', 'freezing rain'], 67: ['Rain', 'freezing rain'],
            71: ['Snow', 'slight'], 73: ['Snow', 'moderate'], 75: ['Snow', 'heavy'], 77: ['Snow', 'Snow grains'], 85: ['Snow', 'slight'], 86: ['Snow', 'heavy'],
            80: ['Rain', 'light intensity shower rain'], 81: ['Rain', 'shower rain'], 82: ['Rain', 'heavy intensity shower rain'],
            95: ['Thunderstorm', 'Slight or moderate'], 96: ['Thunderstorm', 'slight hail'], 99: ['Thunderstorm', 'heavy hail']
        }

        with open('csv/daily_weather.json', 'r') as f:
            all_daily_settings = json.load(f)
        with open('csv/current_weather.json', 'r') as f:
            all_current_settings = json.load(f)

        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())
        all_locations = list(set(current_locations + daily_locations))

        current_weathers = {}
        daily_weathers = {}

        for location in all_locations:
            loc = gn.geocode(location)
            current_weather = {}

            lat = loc.latitude
            lon = loc.longitude
            # BUG FIX: '&curren' had been mangled into the '¤' character by an
            # HTML entity conversion; the parameter is '&current_weather=true'.
            url = 'https://api.open-meteo.com/v1/forecast?latitude={}&longitude={}&hourly=apparent_temperature,temperature_2m,relativehumidity_2m,precipitation_probability,weathercode,cloudcover,visibility,windspeed_10m,winddirection_10m,uv_index,is_day&daily=weathercode,temperature_2m_max,temperature_2m_min&current_weather=true&timezone=UTC'.format(lat, lon)
            r = requests.get(url).json()

            # Locate the forecast entry for the current UTC hour.
            times = r['hourly']['time']
            hour_now = datetime.now(pytz.utc).strftime('%Y-%m-%dT%H:00')
            index_pos = times.index(hour_now)

            main_weather_code = r['hourly']['weathercode'][index_pos]
            current_weather['main_weather'] = weather_codes[main_weather_code][0]
            current_weather['description'] = weather_codes[main_weather_code][1]
            current_weather['temp'] = r['hourly']['temperature_2m'][index_pos]
            current_weather['min_temp'] = r['daily']['temperature_2m_min'][0]
            current_weather['max_temp'] = r['daily']['temperature_2m_max'][0]
            current_weather['feels_like'] = r['hourly']['apparent_temperature'][index_pos]
            current_weather['humidity'] = r['hourly']['relativehumidity_2m'][index_pos]
            current_weather['clouds'] = r['hourly']['cloudcover'][index_pos]
            # Cap visibility at 10000 (metres, per Open-Meteo).
            if r['hourly']['visibility'][index_pos] > 10000:
                current_weather['visibility'] = 10000
            else:
                current_weather['visibility'] = r['hourly']['visibility'][index_pos]
            current_weather['uv'] = r['hourly']['uv_index'][index_pos]
            current_weather['rain_chance'] = r['hourly']['precipitation_probability'][index_pos]
            current_weather['wind_speed'] = r['hourly']['windspeed_10m'][index_pos]
            current_weather['wind_direction'] = r['hourly']['winddirection_10m'][index_pos]
            current_weather['is_day'] = r['hourly']['is_day'][index_pos]

            if location in current_locations:
                current_weathers[location] = current_weather

            daily_weather = []
            daily = r['daily']
            for i in range(0, 7):
                dct = {}
                daily_weather_code = daily['weathercode'][i]
                dct['main_weather'] = weather_codes[daily_weather_code][0]
                dct['description'] = weather_codes[daily_weather_code][1]
                dct['min_temp'] = daily['temperature_2m_min'][i]
                dct['max_temp'] = daily['temperature_2m_max'][i]
                daily_weather.append(dct)

            # Add relevant current information to the first day in daily.
            # (The original assigned wind_speed twice; the duplicate is gone.)
            daily_weather[0]['temp'] = current_weather['temp']
            daily_weather[0]['rain_chance'] = current_weather['rain_chance']
            daily_weather[0]['humidity'] = current_weather['humidity']
            daily_weather[0]['wind_speed'] = current_weather['wind_speed']
            daily_weather[0]['uv'] = current_weather['uv']
            daily_weather[0]['clouds'] = current_weather['clouds']
            daily_weather[0]['wind_direction'] = current_weather['wind_direction']
            daily_weather[0]['visibility'] = current_weather['visibility']

            if location in daily_locations:
                daily_weathers[location] = daily_weather

        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers

        with open("csv/current_weather.json", 'w+') as f:
            json.dump(all_current_settings, f)
        with open("csv/daily_weather.json", 'w+') as f:
            json.dump(all_daily_settings, f)
    except Exception:
        pass
2022-04-11 17:33:28 +00:00
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
def updateLeagueTables(api_key, logf):
    """Refresh csv/league_tables.json with current standings for every enabled league.

    Reads the league list from the existing csv/league_tables.json, queries the
    ScriptsAPI endpoint once for all leagues, and rewrites the file.  For golf
    ('pga'/'lpga') the raw player records are stored and player photos /
    country flags are mirrored into the local logo caches; for team sports a
    trimmed per-team dict is stored.  Best-effort: every failure is swallowed.

    :param api_key: ScriptsAPI key appended to the request URL.
    :param logf: open log file handle (currently unused here).
    """
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?stats='
    try:
        f = open('csv/league_tables.json', 'r')
        all_settings = json.load(f)
        f.close()
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
        # Build one comma-separated query covering every enabled league.
        for league in leagues:
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
        url = url[:-1]  # remove last comma
        url += '&apiKey=' + api_key
        r = requests.get(url)
        all_data = r.json()
        # Response is a list of single-key dicts: [{league_name: rows}, ...]
        for i, l in enumerate(all_data):
            league = list(l.keys())[0]
            teams = []
            if league == 'pga' or league == 'lpga':
                # Golf: keep raw rows, cache player/country images locally.
                logo_files = []
                for d in all_data[i][league]:
                    del d['_id'], d['updated']
                    teams.append(d)
                    # Country flags share the ufc_countries cache directory.
                    try:
                        if d['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                            urllib.request.urlretrieve(d['country'], 'logos/ufc_countries/' + d['country'].split('/')[-1].split('&')[0])
                    except:
                        pass
                    try:
                        if league == 'pga':
                            if d['photo'].split('/')[-1].split('&')[0] not in os.listdir('logos/pga_rank/'):
                                urllib.request.urlretrieve(d['photo'], 'logos/pga_rank/' + d['photo'].split('/')[-1].split('&')[0])
                        elif league == 'lpga':
                            if d['photo'].split('/')[-1] not in os.listdir('logos/lpga_rank/'):
                                urllib.request.urlretrieve(d['photo'], 'logos/lpga_rank/' + d['photo'].split('/')[-1])
                    except:
                        pass
                    # Remember which photos are still current so stale ones
                    # can be purged below.
                    try:
                        if league == 'pga':
                            logo_files.append(d['photo'].split('/')[-1].split('&')[0])
                        elif league == 'lpga':
                            logo_files.append(d['photo'].split('/')[-1])
                    except:
                        pass
                # Purge cached photos for players no longer in the table.
                if league == 'pga':
                    for file in os.listdir('logos/pga_rank/'):
                        if file not in logo_files:
                            os.remove('logos/pga_rank/' + file)
                elif league == 'lpga':
                    for file in os.listdir('logos/lpga_rank/'):
                        if file not in logo_files:
                            os.remove('logos/lpga_rank/' + file)
            else:
                # Team sports: keep only the fields the display needs.
                for d in all_data[i][league]:
                    team = {}
                    team['name'] = d['strTeam']
                    team['wins'] = d['intWin']
                    team['loss'] = d['intLoss']
                    team['draw'] = d['intDraw']
                    #team['played'] = d['intPlayed']
                    team['standing'] = d['intRank']
                    #team['points'] = d['intPoints']
                    teams.append(team)
            leagues_info[league.upper()] = teams
        all_settings['leagues'] = leagues_info
        # NOTE(review): .format(league) is a no-op — the filename contains no
        # placeholder; left as-is to keep behavior byte-identical.
        f = open("csv/league_tables.json".format(league), 'w+')
        json.dump(all_settings, f)
        f.close()
    except:
        pass
#logf = open('log.txt', "a")
#exc_type, exc_obj, exc_tb = sys.exc_info()
#fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
#logf.write(str(e))
#logf.write('. file: ' + fname)
#logf.write('. line: ' + str(exc_tb.tb_lineno))
#logf.write('. type: ' + str(exc_type))
#logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
#logf.close()
2022-01-23 10:24:12 +00:00
2023-01-09 09:59:07 +00:00
def updatePLtime():
    """Restart the Premier League live-score worker if PL live scores are enabled.

    Reads csv/live_games.json; if 'PREMIERLEAGUE' is among the configured
    leagues, kills any running live_pl.py and spawns a fresh one.  A missing
    file propagates to the caller (as before); any error after the file is
    open is swallowed so a malformed file never kills the update loop.

    Fix: the original opened the file before its try block and leaked the
    descriptor whenever json.load raised; a with-statement now guarantees
    closure on every path.
    """
    with open('csv/live_games.json') as f:
        try:
            all_settings = json.load(f)
            for league in all_settings['leagues']:
                if league == 'PREMIERLEAGUE':
                    # Kill any stale worker before spawning a fresh one.
                    subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
                    subprocess.Popen(["python3", "live_pl.py"], shell=False)
        except Exception:
            # Best-effort: a bad/missing 'leagues' key or subprocess failure
            # must not crash the updater.
            pass
2022-03-05 14:03:11 +00:00
def updateLeagueEvents(api_key, time, logf):
    """Fetch and cache league events for one timescale.

    :param api_key: ScriptsAPI key appended to the request URL.
    :param time: one of 'past', 'upcoming' or 'livescore'; also the name of
        the csv/<time>_games.json file that is read and rewritten.
        NOTE(review): this parameter shadows the imported stdlib ``time``
        module inside the function body.
    :param logf: open log file handle (currently unused here).

    For 'livescore' no HTTP request is made: a per-league live-score worker
    script is (re)started instead and the file is filled with placeholders.
    Otherwise the endpoint is queried for every league listed in the existing
    file and the file is rewritten with fresh event dicts.  Best-effort: all
    failures are swallowed.
    """
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?{}='.format(time)
    # The existing per-timescale file doubles as the list of enabled leagues.
    if time == 'past':
        f = open('csv/past_games.json')
    elif time == 'upcoming':
        f = open('csv/upcoming_games.json')
    elif time == 'livescore':
        f = open('csv/live_games.json')
    try:
        all_settings = json.load(f)
        f.close()
        if time == 'livescore':
            try:
                leagues_info = {}
                for league in all_settings['leagues']:
                    events = []
                    # Each supported league has its own long-running worker
                    # script; kill any stale copy before respawning, then mark
                    # the league 'Filled' so the display knows it is handled.
                    if league == 'NFL':
                        subprocess.run(["sudo", "pkill", "-f", "live_nfl.py"], shell=False)
                        nfl = subprocess.Popen(["python3", "live_nfl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'NBA':
                        subprocess.run(["sudo", "pkill", "-f", "live_nba.py"], shell=False)
                        nba = subprocess.Popen(["python3", "live_nba.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'NHL':
                        subprocess.run(["sudo", "pkill", "-f", "live_nhl.py"], shell=False)
                        nhl = subprocess.Popen(["python3", "live_nhl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'MLB':
                        subprocess.run(["sudo", "pkill", "-f", "live_mlb.py"], shell=False)
                        mlb = subprocess.Popen(["python3", "live_mlb.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'PREMIERLEAGUE':
                        subprocess.run(["sudo", "pkill", "-f", "live_pl.py"], shell=False)
                        premierleague = subprocess.Popen(["python3", "live_pl.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                    if league == 'MLS':
                        subprocess.run(["sudo", "pkill", "-f", "live_mls.py"], shell=False)
                        mls = subprocess.Popen(["python3", "live_mls.py"], shell=False)
                        events.append('Filled')
                        leagues_info[league.upper()] = events
                all_settings['leagues'] = leagues_info
                f = open("csv/live_games.json", 'w+')
                json.dump(all_settings, f)
                f.close()
            except:
                pass
        else:
            leagues = all_settings['leagues'].keys()
            leagues_info = {}
            # Build one comma-separated query covering every enabled league.
            for league in leagues:
                if league == 'PREMIERLEAGUE':
                    url += 'PREMIERLEAGUE,'
                else:
                    url += league + ','
            url = url[:-1]  # remove last comma
            url += '&apiKey=' + api_key
            r = requests.get(url)
            all_data = r.json()
            for league in all_data.keys():
                ten_or_fifteen = slice(None)
                events = []
                # Golf / motorsport feeds: keep only the first 3 events;
                # everything else is taken whole.
                if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU') or (league == 'LIV') or (league == 'F1') or (league == 'NASCAR'):
                    ten_or_fifteen = slice(3)
                else:
                    ten_or_fifteen = slice(None)
                if league == 'UFC':
                    # UFC payload is a single card stored verbatim; fighter
                    # photos and country flags are mirrored into local caches.
                    event = all_data['UFC'][0]
                    events.append(event)
                    if time == 'upcoming':
                        try:
                            logo_files = []
                            for each in all_data['UFC'][0]['fights']:
                                try:
                                    if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
                                        urllib.request.urlretrieve(each['fighter1pic'], 'logos/ufc/' + each['fighter1pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
                                        urllib.request.urlretrieve(each['fighter2pic'], 'logos/ufc/' + each['fighter2pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                #country flags
                                try:
                                    if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                        urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                        urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                            # Purge cached photos for fighters no longer on the card.
                            for file in os.listdir('logos/ufc/'):
                                if file not in logo_files:
                                    os.remove('logos/ufc/' + file)
                        except:
                            pass
                    elif time == 'past':
                        try:
                            logo_files = []
                            for each in all_data['UFC'][0]['fights']:
                                try:
                                    if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
                                        urllib.request.urlretrieve(each['fighter1pic'], 'logos/ufc_past/' + each['fighter1pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
                                        urllib.request.urlretrieve(each['fighter2pic'], 'logos/ufc_past/' + each['fighter2pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                #country flags
                                try:
                                    if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                        urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                                try:
                                    if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                        urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
                                except:
                                    pass
                            for file in os.listdir('logos/ufc_past/'):
                                if file not in logo_files:
                                    os.remove('logos/ufc_past/' + file)
                        except:
                            pass
                else:
                    for d in all_data[league][ten_or_fifteen]:
                        event = {}
                        event['date'] = d['dateEvent']
                        # 'dateEvent2' only exists for multi-day events.
                        try:
                            event['date2'] = d['dateEvent2']
                        except:
                            pass
                        # NOTE(review): time == 'live' never holds on this
                        # branch ('livescore' is handled above) — confirm
                        # whether this is dead code or a legacy value.
                        # The 'progess' [sic] key spelling is kept: consumers
                        # may depend on it.
                        if time == 'live':
                            event['progess'] = d['strProgress']
                            event['status'] = d['strStatus']
                        else:
                            if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU') or (league == 'LIV') or (league == 'F1') or (league == 'NASCAR'):
                                event['date'] = d['dateEvent']
                                try:
                                    event['date2'] = d['dateEvent2']
                                except:
                                    pass
                                # Golf-specific course stats; absent for F1/NASCAR.
                                try:
                                    event['total_yards'] = d['total_yards']
                                    event['shots_par'] = d['shots_par']
                                    event['purse'] = d['purse']
                                except:
                                    pass
                                # Normalize typographic apostrophes for display.
                                event['event'] = d['strEvent'].replace("\u2019", "'")
                                event['venue'] = d['strVenue'].replace("\u2019", "'")
                                event['city'] = d['strCity'].replace("\u2019", "'")
                                event['country'] = d['strCountry']
                                event['season'] = d['strSeason']
                            else:
                                event['round'] = d['intRound']
                                event['time'] = d['strTime']
                                event['home_team'] = d['strHomeTeam']
                                event['away_team'] = d['strAwayTeam']
                        # Results are only attached for non-upcoming events.
                        if time != 'upcoming':
                            if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
                                event['golf_rankings'] = d['player_results']
                                # Mirror player country flags into the shared
                                # ufc_countries cache.
                                for player in event['golf_rankings']:
                                    try:
                                        if player['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                            urllib.request.urlretrieve(player['country'], 'logos/ufc_countries/' + player['country'].split('/')[-1].split('&')[0])
                                    except:
                                        pass
                                # (legacy strResult text-parsing code removed;
                                # see version control history)
                            elif (league == 'LIV'):
                                event['golf_rankings'] = d['player_results']
                                for player in event['golf_rankings']:
                                    try:
                                        if player['country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
                                            urllib.request.urlretrieve(player['country'], 'logos/ufc_countries/' + player['country'].split('/')[-1].split('&')[0])
                                    except:
                                        pass
                                # (legacy strResult text-parsing code removed;
                                # see version control history)
                            else:
                                event['away_score'] = d['intAwayScore']
                                event['home_score'] = d['intHomeScore']
                        events.append(event)
                leagues_info[league.upper()] = events
                all_settings['leagues'] = leagues_info
            f = open("csv/{}_games.json".format(time), 'w+')
            json.dump(all_settings, f)
            f.close()
    except:
        pass
2023-03-23 08:30:58 +00:00
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
def updateSports(api_key, logf):
    """Refresh all sports data: league tables plus live, past and upcoming events.

    :param api_key: ScriptsAPI key forwarded to every updater.
    :param logf: open log file handle forwarded to every updater.
    """
    updateLeagueTables(api_key, logf)
    # One events refresh per timescale, in the same order as before.
    for timescale in ('livescore', 'past', 'upcoming'):
        updateLeagueEvents(api_key, timescale, logf)
2022-01-23 10:24:12 +00:00
def checkStocks(last_update, update_frequency):
    """Decide whether the stock data is due for a refresh.

    :param last_update: naive datetime (NY wall time) of the previous update.
    :param update_frequency: minimum minutes between updates while the
        market is open.
    :return: True if an update should run now.

    Rules: during NY market hours (09:30-16:05, Mon-Fri) update every
    ``update_frequency`` minutes; otherwise update if any configured symbol
    has no data yet, or if the last update predates the previous day's close.

    Fix: the settings file is now read via a with-statement so the handle is
    closed even when json.load raises (the original leaked it on that path).
    """
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
    with open('csv/stocks_settings.json', 'r') as f:
        all_stocks_settings = json.load(f)
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())
    updated = False
    diff = (NY_time - last_update).total_seconds() / 60  # minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5:
        # Market open: rate-limit by the configured frequency.
        if diff >= update_frequency:
            updated = True
    elif emptyInfo(symbols, stock_info):
        # Market closed but some symbols have no data yet.
        updated = True
    else:
        # Market closed: update once if we missed yesterday's close.
        # Round-trip through strftime drops sub-second precision on purpose,
        # matching the stored timestamp format.
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
        if last_update < yday_closing:
            updated = True
    return updated
2022-03-05 14:03:11 +00:00
def updateAll(api_key, weather_key, logf):
    """Run every data updater once, sequentially.

    :param api_key: key shared by the stock/crypto/forex/news/sports updaters.
    :param weather_key: weather API key; falsy disables the weather refresh.
    :param logf: open log file handle forwarded to every updater.
    """
    # Same order as before: stocks, crypto, forex, news, sports.
    for updater in (updateStocks, updateCrypto, updateForex, updateNews, updateSports):
        updater(api_key, logf)
    # Weather uses its own key and is optional.
    if weather_key:
        updateWeather(weather_key, logf)
2022-01-23 10:24:12 +00:00
2023-01-09 09:59:07 +00:00
# NOTE(review): consumed by the main loop below (truncated here); these look
# like one-shot flags gating the ESPN / Premier League live-score restarts —
# confirm against the rest of the script before relying on this description.
past_espn_time = True
past_pl_time = True
2022-01-23 10:24:12 +00:00
if __name__ == ' __main__ ' :
logf = open ( " log.txt " , " a " )
t = time . time ( )
2022-01-31 19:11:01 +00:00
2022-01-23 10:24:12 +00:00
2023-03-30 18:35:22 +00:00
update_frequencies = { ' stocks ' : 2 , ' crypto ' : 7 , ' forex ' : 60 , ' news ' : 120 , ' weather ' : 120 , ' sports ' : 1440 , ' commodities ' : 15 , ' indices ' : 15 , ' movies ' : 1440 , ' ipo ' : 1440 , ' prepost ' : 15 } #minutes
2022-01-23 10:24:12 +00:00
NY_zone = pytz . timezone ( ' America/New_York ' )
2022-03-04 16:37:29 +00:00
CET_zone = pytz . timezone ( ' EST ' )
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone )
CET_time = datetime . now ( CET_zone )
NY_str = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
CET_str = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
#f = open('csv/last_updates.json', 'w+')
#update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
#json.dump(update_times, f)
#f.close()
2022-01-31 19:11:01 +00:00
f = open ( ' api_keys.txt ' )
api_keys = f . readlines ( )
api_key = api_keys [ 0 ] . strip ( )
2022-02-26 15:50:10 +00:00
2022-02-24 19:42:33 +00:00
2022-01-31 19:11:01 +00:00
try :
weather_key = api_keys [ 1 ] . strip ( )
except Exception as e :
2022-02-21 17:18:45 +00:00
weather_key = False
2022-04-07 17:35:38 +00:00
logf = open ( ' log.txt ' , " a " )
2022-01-31 19:11:01 +00:00
exc_type , exc_obj , exc_tb = sys . exc_info ( )
fname = os . path . split ( exc_tb . tb_frame . f_code . co_filename ) [ 1 ]
logf . write ( str ( e ) )
logf . write ( ' . file: ' + fname )
logf . write ( ' . line: ' + str ( exc_tb . tb_lineno ) )
logf . write ( ' . type: ' + str ( exc_type ) )
logf . write ( ' \n ' + " " . join ( traceback . format_exception ( sys . exc_info ( ) [ 0 ] , sys . exc_info ( ) [ 1 ] , sys . exc_info ( ) [ 2 ] ) ) )
2022-04-07 17:35:38 +00:00
logf . close ( )
2022-01-31 19:11:01 +00:00
2023-01-09 08:23:25 +00:00
try :
movie_key = open ( ' movie_api_key.txt ' ) . readlines ( ) [ 0 ]
except Exception as e :
movie_key = False
2022-03-06 09:29:42 +00:00
2023-03-08 07:23:05 +00:00
try :
ipo_key = open ( ' ipo_api_key.txt ' ) . readlines ( ) [ 0 ]
except Exception as e :
ipo_key = False
2022-01-23 10:24:12 +00:00
t = time . time ( )
2022-02-28 20:16:31 +00:00
update_processes = [ ]
2022-03-06 09:29:42 +00:00
2022-10-06 08:38:12 +00:00
# try:
# time.sleep(60)
# f = open('csv/last_updates.json', 'r')
# last_updates = json.load(f)
# f.close()
# last_updates['stocks']['force'] = True
# #last_updates['weather']['force'] = True
# f = open('csv/last_updates.json', 'w')
# json.dump(last_updates, f)
# f.close()
# except:
# pass
2022-03-06 09:29:42 +00:00
2022-01-23 10:24:12 +00:00
try :
while True :
2022-03-06 09:29:42 +00:00
try :
f = open ( ' csv/last_updates.json ' , ' r ' )
last_updates = json . load ( f )
f . close ( )
except :
2023-04-21 08:46:43 +00:00
last_updates = { " scheduler " : { " force " : False } , " stocks " : { " time " : " 06/03/2022 04:12:09 " , " force " : True } , " crypto " : { " time " : " 06/03/2022 04:10:39 " , " force " : True } ,
2022-03-07 17:34:24 +00:00
" news " : { " time " : " 06/03/2022 04:07:09 " , " force " : True } , " weather " : { " time " : " 06/03/2022 04:08:20 " , " force " : True } ,
" forex " : { " time " : " 06/03/2022 03:54:02 " , " force " : True } , " sports_l " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } ,
" sports_p " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } ,
2023-03-27 15:08:19 +00:00
" sports_u " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " sports_t " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " commodities " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " indices " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " movies " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " ipo " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } , " prepost " : { " time " : " 06/03/2022 04:10:09 " , " force " : True } }
2022-03-06 09:29:42 +00:00
2023-04-21 08:46:43 +00:00
try :
if last_updates [ ' scheduler ' ] [ ' force ' ] :
try :
f = open ( ' csv/scheduler.json ' , ' r ' )
schedules = json . load ( f )
2023-04-21 08:56:53 +00:00
f . close ( )
2023-04-21 11:10:21 +00:00
shutdown_schedule_hour = schedules [ ' shutdown ' ] [ ' hour ' ]
shutdown_schedule_minute = schedules [ ' shutdown ' ] [ ' minute ' ]
reboot_schedule_hour = schedules [ ' reboot ' ] [ ' hour ' ]
reboot_schedule_minute = schedules [ ' reboot ' ] [ ' minute ' ]
2023-04-21 08:46:43 +00:00
timezone = schedules [ ' timezone ' ]
shutdown_enabled = schedules [ ' shutdown ' ] [ ' enabled ' ]
reboot_enabled = schedules [ ' reboot ' ] [ ' enabled ' ]
except :
2023-04-21 11:10:21 +00:00
shutdown_schedule_hour = " 00 "
shutdown_schedule_minute = " 00 "
reboot_schedule_hour = " 00 "
reboot_schedule_minute = " 00 "
2023-04-21 08:46:43 +00:00
timezone = " GMT "
shutdown_enabled = False
reboot_enabled = False
2023-04-21 11:26:23 +00:00
last_updates [ ' scheduler ' ] [ ' force ' ] = False
2023-04-21 08:46:43 +00:00
except :
pass
#SHUTDOWN
try :
2023-04-21 11:10:21 +00:00
if datetime . now ( pytz . timezone ( timezone ) ) . strftime ( " % H: % M " ) == shutdown_schedule_hour + ' : ' + shutdown_schedule_minute and shutdown_enabled :
2023-04-21 08:46:43 +00:00
os . system ( ' sudo shutdown now ' )
except :
pass
#REBOOT
try :
2023-04-21 11:10:21 +00:00
if datetime . now ( pytz . timezone ( timezone ) ) . strftime ( " % H: % M " ) == reboot_schedule_hour + ' : ' + reboot_schedule_minute and reboot_enabled :
2023-04-21 08:46:43 +00:00
os . system ( ' sudo reboot ' )
except :
pass
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
2022-03-06 09:29:42 +00:00
#msg = getInput()
2022-03-01 18:27:45 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
2022-01-23 10:24:12 +00:00
#stocks
2022-03-06 09:29:42 +00:00
stock_time = datetime . strptime ( last_updates [ ' stocks ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-01-23 10:24:12 +00:00
stock_frequency = update_frequencies [ ' stocks ' ]
2022-02-28 20:57:46 +00:00
diff = ( NY_time - stock_time ) . total_seconds ( ) / 60 #minutes
2022-03-06 10:04:03 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
opening = NY_time . replace ( hour = 9 , minute = 30 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
2022-04-07 17:35:38 +00:00
closing = NY_time . replace ( hour = 16 , minute = 5 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
2022-03-06 10:04:03 +00:00
stock_open = opening < NY_time < closing and datetime . today ( ) . weekday ( ) < = 4
if last_updates [ ' stocks ' ] [ ' force ' ] or ( diff > = update_frequencies [ ' stocks ' ] and stock_open ) : # or msg == 's':
2022-01-23 10:24:12 +00:00
stock_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-03-06 09:29:42 +00:00
last_updates [ ' stocks ' ] [ ' time ' ] = stock_time
last_updates [ ' stocks ' ] [ ' force ' ] = False
2022-02-28 20:16:31 +00:00
#updateStocks(api_key)
2022-03-05 14:03:11 +00:00
update_process = Process ( target = updateStocks , args = ( api_key , logf ) )
2022-02-28 20:16:31 +00:00
update_process . start ( )
update_processes . append ( update_process )
2023-03-29 08:46:13 +00:00
NY_time1 = datetime . now ( NY_zone ) . replace ( tzinfo = None )
NY_time2 = datetime . now ( NY_zone ) . replace ( tzinfo = None )
#prepost
preopen = NY_time1 . replace ( hour = 4 , minute = 0 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
preclose = NY_time1 . replace ( hour = 9 , minute = 30 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
postopen = NY_time2 . replace ( hour = 16 , minute = 0 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
postclose = NY_time2 . replace ( hour = 20 , minute = 20 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
prepost_frequency = update_frequencies [ ' prepost ' ]
prepost_time = datetime . strptime ( last_updates [ ' prepost ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
pre_open = preopen < NY_time1 < preclose and NY_time1 . weekday ( ) < = 4
post_open = postopen < NY_time2 < postclose and NY_time2 . weekday ( ) < = 4
diff1 = ( NY_time1 - prepost_time ) . total_seconds ( ) / 60 #minutes
diff2 = ( NY_time2 - prepost_time ) . total_seconds ( ) / 60 #minutes
if ( last_updates [ ' prepost ' ] [ ' force ' ] ) or ( diff1 > = update_frequencies [ ' prepost ' ] and pre_open ) or ( diff2 > = update_frequencies [ ' prepost ' ] and post_open ) :
prepost_time = NY_time1 . strftime ( " %d / % m/ % Y % H: % M: % S " )
last_updates [ ' prepost ' ] [ ' time ' ] = prepost_time
last_updates [ ' prepost ' ] [ ' force ' ] = False
update_process = Process ( target = updateStocksPrePost , args = ( api_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2022-01-23 10:24:12 +00:00
# crypto
2022-03-06 09:29:42 +00:00
crypto_time = datetime . strptime ( last_updates [ ' crypto ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - crypto_time ) . total_seconds ( ) / 60 #minutes
2022-03-05 11:23:47 +00:00
2022-03-06 09:29:42 +00:00
if last_updates [ ' crypto ' ] [ ' force ' ] or diff > = update_frequencies [ ' crypto ' ] : # or msg == 'c':
2022-01-23 10:24:12 +00:00
crypto_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-03-06 09:29:42 +00:00
#updateCrypto(api_key, logf)
2022-03-05 14:03:11 +00:00
2022-03-06 09:29:42 +00:00
last_updates [ ' crypto ' ] [ ' time ' ] = crypto_time
last_updates [ ' crypto ' ] [ ' force ' ] = False
update_process = Process ( target = updateCrypto , args = ( api_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2022-06-12 17:19:14 +00:00
# commodities
commodities_time = datetime . strptime ( last_updates [ ' commodities ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - commodities_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' commodities ' ] [ ' force ' ] or diff > = update_frequencies [ ' commodities ' ] : # or msg == 'c':
commodities_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
#updateCrypto(api_key, logf)
last_updates [ ' commodities ' ] [ ' time ' ] = commodities_time
last_updates [ ' commodities ' ] [ ' force ' ] = False
update_process = Process ( target = updateCommodities , args = ( api_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2022-10-06 05:00:59 +00:00
# indices
indices_time = datetime . strptime ( last_updates [ ' indices ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - indices_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' indices ' ] [ ' force ' ] or diff > = update_frequencies [ ' indices ' ] : # or msg == 'c':
indices_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
last_updates [ ' indices ' ] [ ' time ' ] = indices_time
last_updates [ ' indices ' ] [ ' force ' ] = False
update_process = Process ( target = updateIndices , args = ( api_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2023-01-09 08:23:25 +00:00
# movies
movies_time = datetime . strptime ( last_updates [ ' movies ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - movies_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' movies ' ] [ ' force ' ] or diff > = update_frequencies [ ' movies ' ] :
movies_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
last_updates [ ' movies ' ] [ ' time ' ] = movies_time
last_updates [ ' movies ' ] [ ' force ' ] = False
update_process = Process ( target = updateMovies , args = ( movie_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2023-03-08 07:25:34 +00:00
# ipos
ipo_time = datetime . strptime ( last_updates [ ' ipo ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - ipo_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' ipo ' ] [ ' force ' ] or diff > = update_frequencies [ ' ipo ' ] :
ipo_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
last_updates [ ' ipo ' ] [ ' time ' ] = ipo_time
last_updates [ ' ipo ' ] [ ' force ' ] = False
update_process = Process ( target = updateIpo , args = ( ipo_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2022-01-23 10:24:12 +00:00
# weather
2022-03-06 09:29:42 +00:00
# Last successful weather refresh, stored as a naive NY-time string.
weather_time = datetime . strptime ( last_updates [ ' weather ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - weather_time ) . total_seconds ( ) / 60 #minutes
2022-03-06 09:29:42 +00:00
# Refresh on force or when the configured interval (minutes) has elapsed.
if last_updates [ ' weather ' ] [ ' force ' ] or diff > = update_frequencies [ ' weather ' ] : # or msg == 'w':
2022-01-23 10:24:12 +00:00
weather_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-02-28 20:16:31 +00:00
#updateWeather(weather_key)
2022-03-06 09:29:42 +00:00
last_updates [ ' weather ' ] [ ' time ' ] = weather_time
last_updates [ ' weather ' ] [ ' force ' ] = False
2022-03-05 14:03:11 +00:00
# Spawned as a child process so the fetch cannot block the scheduler loop.
update_process = Process ( target = updateWeather , args = ( weather_key , logf ) )
2022-02-28 20:16:31 +00:00
update_process . start ( )
update_processes . append ( update_process )
2022-01-23 10:24:12 +00:00
# news
2022-03-06 09:29:42 +00:00
# Last successful news refresh, stored as a naive NY-time string.
news_time = datetime . strptime ( last_updates [ ' news ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - news_time ) . total_seconds ( ) / 60 #minutes
2022-03-06 09:29:42 +00:00
# Refresh on force or when the configured interval (minutes) has elapsed.
if last_updates [ ' news ' ] [ ' force ' ] or diff > = update_frequencies [ ' news ' ] : # or msg == 'n':
2022-01-23 10:24:12 +00:00
news_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-02-28 20:16:31 +00:00
#updateNews(api_key)
2022-03-06 09:29:42 +00:00
last_updates [ ' news ' ] [ ' time ' ] = news_time
last_updates [ ' news ' ] [ ' force ' ] = False
2022-03-05 14:03:11 +00:00
update_process = Process ( target = updateNews , args = ( api_key , logf ) )
2022-02-28 20:16:31 +00:00
update_process . start ( )
update_processes . append ( update_process )
2022-03-07 17:34:24 +00:00
# sports upcoming
# Upcoming fixtures share the generic 'sports' interval but track their own
# last-run timestamp under the 'sports_u' key.
sports_time = datetime . strptime ( last_updates [ ' sports_u ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-01-23 10:24:12 +00:00
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - sports_time ) . total_seconds ( ) / 60 #minutes
2022-03-07 17:34:24 +00:00
if last_updates [ ' sports_u ' ] [ ' force ' ] or diff > = update_frequencies [ ' sports ' ] : # or msg == 'S':
2022-01-23 10:24:12 +00:00
sports_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-02-28 20:16:31 +00:00
#updateSports(api_key)
2022-03-07 17:34:24 +00:00
last_updates [ ' sports_u ' ] [ ' time ' ] = sports_time
last_updates [ ' sports_u ' ] [ ' force ' ] = False
# 'upcoming' selects which event set updateLeagueEvents fetches.
update_process = Process ( target = updateLeagueEvents , args = ( api_key , ' upcoming ' , logf ) )
2022-02-28 20:16:31 +00:00
update_process . start ( )
update_processes . append ( update_process )
2022-03-07 17:34:24 +00:00
# sports live
sports_time = datetime . strptime ( last_updates [ ' sports_l ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
2023-01-09 09:59:07 +00:00
# Live scores refresh once per day at a fixed wall-clock time rather than on
# an elapsed-minutes interval. Zero-padded "%H:%M" strings compare correctly
# with < / >= as plain strings.
espn_time = " 17:00Z "
espn_time_est = " 12:00 "
# if datetime.now(pytz.utc).strftime("%H:%MZ") < espn_time:
# Arm the flag while it is still before 12:00 America/New_York ...
if datetime . now ( pytz . timezone ( ' America/New_York ' ) ) . strftime ( " % H: % M " ) < espn_time_est :
past_espn_time = True
# ... and fire exactly once when the clock passes 12:00 (or on force).
# NOTE(review): if the script starts after 12:00 ET, past_espn_time looks
# unbound on the first pass unless it is initialised before this loop --
# TODO confirm; the enclosing bare except would silently swallow the
# resulting NameError.
if last_updates [ ' sports_l ' ] [ ' force ' ] or ( datetime . now ( pytz . timezone ( ' America/New_York ' ) ) . strftime ( " % H: % M " ) > = espn_time_est and past_espn_time ) : # or msg == 'S':
# if last_updates['sports_l']['force'] or (datetime.now(pytz.utc).strftime("%H:%MZ") >= espn_time and past_espn_time):# or msg == 'S':
2022-03-07 17:34:24 +00:00
sports_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
last_updates [ ' sports_l ' ] [ ' time ' ] = sports_time
last_updates [ ' sports_l ' ] [ ' force ' ] = False
2023-01-09 09:59:07 +00:00
past_espn_time = False
2022-03-07 17:34:24 +00:00
update_process = Process ( target = updateLeagueEvents , args = ( api_key , ' livescore ' , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2023-01-09 09:59:07 +00:00
#sports live (premier league)
# Premier League refresh fires once per day when UTC passes 12:00, using the
# same armed-flag pattern as the ESPN section above.
# NOTE(review): this section reuses the 'sports_l' timestamp key written by
# the previous section, and (unlike it) does not honour the 'force' flag in
# its condition -- presumably intentional, but verify.
pl_time = " 12:00Z "
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
sports_time = datetime . strptime ( last_updates [ ' sports_l ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
if datetime . now ( pytz . utc ) . strftime ( " % H: % MZ " ) < pl_time :
past_pl_time = True
# NOTE(review): as with past_espn_time, past_pl_time looks unbound on the
# first pass if the script starts after 12:00 UTC -- TODO confirm it is
# initialised before the loop.
if datetime . now ( pytz . utc ) . strftime ( " % H: % MZ " ) > = pl_time and past_pl_time : # or msg == 'S':
sports_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
past_pl_time = False
last_updates [ ' sports_l ' ] [ ' time ' ] = sports_time
last_updates [ ' sports_l ' ] [ ' force ' ] = False
update_process = Process ( target = updatePLtime )
update_process . start ( )
update_processes . append ( update_process )
2022-03-07 17:34:24 +00:00
# sports past
# Past results use the shared 'sports' interval with their own 'sports_p'
# last-run timestamp.
sports_time = datetime . strptime ( last_updates [ ' sports_p ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - sports_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' sports_p ' ] [ ' force ' ] or diff > = update_frequencies [ ' sports ' ] : # or msg == 'S':
sports_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
#updateSports(api_key)
last_updates [ ' sports_p ' ] [ ' time ' ] = sports_time
last_updates [ ' sports_p ' ] [ ' force ' ] = False
update_process = Process ( target = updateLeagueEvents , args = ( api_key , ' past ' , logf ) )
update_process . start ( )
update_processes . append ( update_process )
# sports table
# League standings: same shared 'sports' interval, own 'sports_t' timestamp.
sports_time = datetime . strptime ( last_updates [ ' sports_t ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
NY_time = datetime . now ( NY_zone ) . replace ( tzinfo = None )
diff = ( NY_time - sports_time ) . total_seconds ( ) / 60 #minutes
if last_updates [ ' sports_t ' ] [ ' force ' ] or diff > = update_frequencies [ ' sports ' ] : # or msg == 'S':
sports_time = NY_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
#updateSports(api_key)
last_updates [ ' sports_t ' ] [ ' time ' ] = sports_time
last_updates [ ' sports_t ' ] [ ' force ' ] = False
update_process = Process ( target = updateLeagueTables , args = ( api_key , logf ) )
update_process . start ( )
update_processes . append ( update_process )
2022-01-23 10:24:12 +00:00
#forex updates once every 24hours at 1700 CET
# update if last update was before the previous days closing
2022-03-06 09:29:42 +00:00
forex_time = datetime . strptime ( last_updates [ ' forex ' ] [ ' time ' ] , " %d / % m/ % Y % H: % M: % S " )
2022-03-04 16:32:41 +00:00
CET_time = datetime . now ( CET_zone ) . replace ( tzinfo = None )
2022-01-23 10:24:12 +00:00
# NOTE(review): yday_update (yesterday's 17:00 CET close) is computed but
# not referenced anywhere below -- looks like dead code from an older
# "updated since last close" check; confirm before removing.
yday_update = ( CET_time . replace ( hour = 17 , minute = 00 , second = 0 , microsecond = 0 ) - dt . timedelta ( days = 1 ) ) . replace ( tzinfo = None )
diff = ( CET_time . replace ( tzinfo = None ) - forex_time ) . total_seconds ( ) / 60
opening = CET_time . replace ( hour = 17 , minute = 0 , second = 0 , microsecond = 0 ) . replace ( tzinfo = None )
#forex updates between 5pm sunday and 5pm friday every hour
# Market considered open Mon-Thu (weekday < 4), Sunday after 17:00 CET, and
# Friday before 17:00 CET.
forex_open = datetime . today ( ) . weekday ( ) < 4 or ( datetime . today ( ) . weekday ( ) == 6 and CET_time > opening ) or ( datetime . today ( ) . weekday ( ) == 4 and CET_time < opening )
2022-03-06 09:29:42 +00:00
# Refresh on force, or when the interval elapsed while the market is open.
if last_updates [ ' forex ' ] [ ' force ' ] or ( diff > = update_frequencies [ ' forex ' ] and forex_open ) : # or msg == 'f':
2022-01-23 10:24:12 +00:00
# Forex timestamps are recorded in CET, unlike the NY-time sections above.
forex_time = CET_time . strftime ( " %d / % m/ % Y % H: % M: % S " )
2022-03-06 09:29:42 +00:00
last_updates [ ' forex ' ] [ ' time ' ] = forex_time
last_updates [ ' forex ' ] [ ' force ' ] = False
2022-02-28 20:16:31 +00:00
#updateForex(api_key)
2022-03-05 14:03:11 +00:00
update_process = Process ( target = updateForex , args = ( api_key , logf ) )
2022-02-28 20:16:31 +00:00
update_process . start ( )
update_processes . append ( update_process )
2022-01-23 10:24:12 +00:00
2022-03-06 09:29:42 +00:00
# Persist the (possibly modified) last-update timestamps/flags every pass so
# a restart does not re-trigger every updater at once.
f = open ( ' csv/last_updates.json ' , ' w+ ' )
json . dump ( last_updates , f )
f . close ( )
2022-02-28 20:16:31 +00:00
# Reap updater processes that have exited so update_processes does not grow
# without bound across loop iterations.
# FIX: the original iterated update_processes while calling .remove() on it;
# removing the current element shifts the list so the next element is
# skipped, leaving some finished processes un-reaped until a later pass.
# Iterating a shallow copy makes removal safe.
for process in update_processes[:]:
    if not process.is_alive():
        # join() a finished child to release its resources; terminate() on an
        # already-exited process is harmless (kept from the original code).
        process.join()
        process.terminate()
        update_processes.remove(process)
2022-04-07 17:35:38 +00:00
2022-03-06 09:29:42 +00:00
# Throttle the scheduler loop; all fetch work runs in child processes.
time . sleep ( 10 )
2022-01-23 10:24:12 +00:00
2022-10-21 09:12:13 +00:00
# NOTE(review): bare except silently swallows every error from the scheduler
# loop (including NameError/KeyError and KeyboardInterrupt). The
# commented-out block below shows failures were once logged to log.txt --
# consider restoring that logging and narrowing to `except Exception`.
except :
pass
2022-04-07 17:35:38 +00:00
2022-03-07 17:43:34 +00:00
2022-10-21 09:12:13 +00:00
# logf = open('log.txt', "a")
# exc_type, exc_obj, exc_tb = sys.exc_info()
# fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
# logf.write(str(e))
# logf.write('. file: ' + fname)
# logf.write('. line: ' + str(exc_tb.tb_lineno))
# logf.write('. type: ' + str(exc_type))
# logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
# logf.close()
2022-01-23 10:24:12 +00:00
2022-03-05 14:03:11 +00:00
2022-01-23 10:24:12 +00:00
2022-04-07 17:35:38 +00:00