csv/ipo_settings.json (new file)
@@ -0,0 +1 @@
+{"feature": "IPO", "speed": "medium", "speed2": "medium", "animation": "down", "title": true, "symbols": ["No Data"]}
(file name not captured)
@@ -1 +1 @@
-{"stocks": {"time": "14/06/2022 06:42:06", "force": false}, "crypto": {"time": "14/06/2022 06:58:18", "force": false}, "news": {"time": "14/06/2022 05:29:08", "force": false}, "weather": {"time": "14/06/2022 05:29:08", "force": false}, "forex": {"time": "14/06/2022 05:29:14", "force": false}, "sports_l": {"time": "14/06/2022 04:42:37", "force": false}, "sports_p": {"time": "14/06/2022 06:27:34", "force": false}, "sports_u": {"time": "14/06/2022 06:28:34", "force": false}, "sports_t": {"time": "14/06/2022 06:26:23", "force": false}, "commodities": {"time": "14/06/2022 06:51:07", "force": false}, "indices": {"time": "05/10/2022 04:06:10", "force": false}, "movies": {"time": "05/10/2022 02:31:40", "force": false}}
+{"scheduler":{"force": false}, "stocks": {"time": "14/06/2022 06:42:06", "force": false}, "crypto": {"time": "14/06/2022 06:58:18", "force": false}, "news": {"time": "14/06/2022 05:29:08", "force": false}, "weather": {"time": "14/06/2022 05:29:08", "force": false}, "forex": {"time": "14/06/2022 05:29:14", "force": false}, "sports_l": {"time": "14/06/2022 04:42:37", "force": false}, "sports_p": {"time": "14/06/2022 06:27:34", "force": false}, "sports_u": {"time": "14/06/2022 06:28:34", "force": false}, "sports_t": {"time": "14/06/2022 06:26:23", "force": false}, "commodities": {"time": "14/06/2022 06:51:07", "force": false}, "indices": {"time": "05/10/2022 04:06:10", "force": false}, "movies": {"time": "05/10/2022 02:31:40", "force": false}, "ipo": {"time": "05/10/2022 02:31:40", "force": false}, "prepost": {"time": "05/10/2022 02:31:40", "force": false}}
(Python updater script; file name not captured)
@@ -28,6 +28,7 @@ try:
     last_updates = json.load(f)
     f.close()
     last_updates['stocks']['force'] = True
+    last_updates['prepost']['force'] = True
     last_updates['sports_l']['force'] = True
     f = open('csv/last_updates.json', 'w')
     json.dump(last_updates, f)
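For reference, the force flags toggled in this hunk are plain JSON fields; a minimal sketch of the same read-modify-write cycle on csv/last_updates.json (path taken from the hunk above, run from the repo root):

import json

# Load the cached timestamps, request a refresh for selected feeds,
# then write the file back so the main loop picks the flags up.
with open('csv/last_updates.json', 'r') as f:
    last_updates = json.load(f)

for feed in ('stocks', 'prepost', 'sports_l'):
    last_updates[feed]['force'] = True   # same keys this hunk touches

with open('csv/last_updates.json', 'w') as f:
    json.dump(last_updates, f)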
@@ -35,6 +36,31 @@ try:
 except:
     pass

+try:
+    f = open('csv/scheduler.json', 'r')
+    schedules = json.load(f)
+    f.close()
+
+    shutdown_schedule_hour = schedules['shutdown']['hour']
+    shutdown_schedule_minute = schedules['shutdown']['minute']
+
+    reboot_schedule_hour = schedules['reboot']['hour']
+    reboot_schedule_minute = schedules['reboot']['minute']
+
+    timezone = schedules['timezone']
+    shutdown_enabled = schedules['shutdown']['enabled']
+    reboot_enabled = schedules['reboot']['enabled']
+except:
+    shutdown_schedule_hour = "00"
+    shutdown_schedule_minute = "00"
+
+    reboot_schedule_hour = "00"
+    reboot_schedule_minute = "00"
+
+    timezone = "GMT"
+    shutdown_enabled = False
+    reboot_enabled = False
+
+
 def getInput(Block=False):
     if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
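The block above only reads csv/scheduler.json; that file itself is not part of this diff. A sketch of the shape implied by the keys it reads (hour, minute and enabled under shutdown and reboot, plus a timezone string); the concrete values below are placeholders, not taken from the repo:

import json

# Hypothetical default matching the keys read above; written from the repo root.
default_schedule = {
    "timezone": "GMT",
    "shutdown": {"hour": "00", "minute": "00", "enabled": False},
    "reboot": {"hour": "00", "minute": "00", "enabled": False},
}

with open('csv/scheduler.json', 'w') as f:
    json.dump(default_schedule, f)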
@@ -112,6 +138,71 @@ def updateStocks(api_key, logf):
         #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
         #logf.close()

+
+def updateStocksPrePost(api_key, logf):
+
+    try:
+
+        f = open('csv/stocks_settings.json', 'r')
+        all_stocks_settings = json.load(f)
+        f.close()
+        stock_info = all_stocks_settings['symbols']
+        symbols = list(stock_info.keys())
+
+        #KEEP THIS JUST IN CASE V7 GOES DOWN prepost_url = 'https://query2.finance.yahoo.com/v6/finance/quote?symbols='
+        prepost_url = 'https://query2.finance.yahoo.com/v7/finance/quote?symbols='
+        for symbol in symbols:
+            prepost_url += symbol + ','
+
+        prepost_url += '&fields=regularMarketPreviousClose,regularMarketPrice,preMarketPrice,preMarketChangePercent,regularMarketChangePercent,regularMarketChange,preMarketChange,postMarketPrice,postMarketChange,postMarketChangePercent&region=US&lang=en-US'
+
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'
+        }
+        prepost = requests.get(prepost_url, headers=headers)
+
+        if 'Unauthorized' in str(prepost.json()):
+            prepost = requests.get(prepost_url.replace('v7','v6'), headers=headers)
+
+        prepost_data = prepost.json()['quoteResponse']['result']
+        time_now = datetime.now(pytz.timezone('America/New_York')).strftime("%H:%M EST")
+
+        if len(prepost_data) > 0:
+            for symbol in symbols:
+                for stock in prepost_data:
+                    if stock['symbol'] == symbol:
+                        stock_info[stock['symbol']] = {"time_now":time_now}
+                        try:
+                            stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['preMarketPrice'],
+                                'prechange': '%.2f' % stock['preMarketChange'],
+                                'prepercent': '%.2f' % stock['preMarketChangePercent']}
+                        except:
+                            try:
+                                stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['postMarketPrice'],
+                                    'prechange': '%.2f' % 0,
+                                    'prepercent': '%.2f' % 0}
+                            except:
+                                stock_info[stock['symbol']]['Pre-market'] = {'preprice': '%.2f' % stock['regularMarketPrice'],
+                                    'prechange': '%.2f' % 0,
+                                    'prepercent': '%.2f' % 0}
+                        try:
+                            stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['postMarketPrice'],
+                                'postchange': '%.2f' % stock['postMarketChange'],
+                                'postpercent': '%.2f' % stock['postMarketChangePercent']}
+                        except:
+                            stock_info[stock['symbol']]['Post-market'] = {'postprice': '%.2f' % stock['regularMarketPrice'],
+                                'postchange': '%.2f' % 0,
+                                'postpercent': '%.2f' % 0}
+
+        all_stocks_settings['symbols'] = stock_info
+
+        with open('csv/prepost_settings.json', 'w+') as f:
+            json.dump(all_stocks_settings['symbols'], f)
+
+    except:
+        pass
+
+
 def updateCommodities(api_key, logf):

     try:
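A trimmed, standalone sketch of the request updateStocksPrePost builds, assuming the requests package and two example tickers that are not from the diff; the endpoint, field list and User-Agent follow the hunk above, including the v7-to-v6 fallback:

import requests

symbols = ['AAPL', 'MSFT']   # example tickers, not from the diff
fields = ('regularMarketPrice,preMarketPrice,preMarketChange,preMarketChangePercent,'
          'postMarketPrice,postMarketChange,postMarketChangePercent')
url = ('https://query2.finance.yahoo.com/v7/finance/quote?symbols=' + ','.join(symbols)
       + '&fields=' + fields + '&region=US&lang=en-US')
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                         '(KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36'}

resp = requests.get(url, headers=headers)
if 'Unauthorized' in str(resp.json()):                   # same fallback as the hunk
    resp = requests.get(url.replace('v7', 'v6'), headers=headers)

for quote in resp.json()['quoteResponse']['result']:
    # Pre/post fields are only present around those sessions, hence .get().
    print(quote['symbol'], quote.get('preMarketPrice'), quote.get('postMarketPrice'))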
@@ -292,6 +383,60 @@ def updateMovies(api_key, logf):
         f.close()
+
+
+def updateIpo(api_key, logf):
+
+    day = datetime.now(pytz.utc).strftime("%Y-%m-%d")
+    dt = datetime.strptime(day, "%Y-%m-%d")
+
+    start = (dt - timedelta(days=dt.weekday()))
+    start_date = start.strftime("%Y-%m-%d")
+
+    end = start + timedelta(days=21)
+    end_date = end.strftime("%Y-%m-%d")
+
+    ipo_url = 'https://finnhub.io/api/v1/calendar/ipo?from='+start_date+'&to='+end_date+'&token='+api_key
+
+    f = open('csv/ipo_settings.json', 'r')
+    ipo_settings = json.load(f)
+    f.close()
+
+    data = requests.get(ipo_url)
+    all_ipo = data.json()
+
+    ipo_list = []
+
+    try:
+        if len(all_ipo['ipoCalendar']) > 0:
+            for ipo in all_ipo['ipoCalendar']:
+                try:
+                    shares = human_format(ipo['numberOfShares'])
+                except:
+                    shares = 'N/A'
+                try:
+                    sharesvalue = human_format(ipo['totalSharesValue'])
+                except:
+                    sharesvalue = 'N/A'
+
+                ipo_list.append({
+                    'date':ipo['date'],
+                    'name':ipo['name'],
+                    'shares':shares,
+                    'price':ipo['price'],
+                    'status':ipo['status'],
+                    'symbol':ipo['symbol'],
+                    'sharesvalue':sharesvalue
+                })
+        else:
+            ipo_list = ['No Data']
+    except:
+        ipo_list = ['No Data']
+
+    ipo_settings['symbols'] = ipo_list
+    f = open('csv/ipo_settings.json', 'w+')
+    json.dump(ipo_settings, f)
+    f.close()
+
+
 def updateIndices(api_key, logf):

     try:
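updateIpo calls a human_format helper that is defined elsewhere in the repo; the sketch below pairs the same Finnhub calendar request with an illustrative stand-in for it (FINNHUB_KEY is a placeholder, not a real token):

import requests
from datetime import datetime, timedelta

def human_format(num):
    # Illustrative stand-in for the repo's helper: 1500000 -> '1.5M'.
    for unit in ('', 'K', 'M', 'B', 'T'):
        if abs(num) < 1000:
            return ('%.1f%s' % (num, unit)).replace('.0', '')
        num /= 1000.0
    return '%.1f%s' % (num, 'Q')

FINNHUB_KEY = 'YOUR_TOKEN'                       # placeholder
today = datetime.utcnow()
start = today - timedelta(days=today.weekday())  # Monday of the current week
end = start + timedelta(days=21)                 # same three-week window as the hunk
url = ('https://finnhub.io/api/v1/calendar/ipo?from=' + start.strftime('%Y-%m-%d')
       + '&to=' + end.strftime('%Y-%m-%d') + '&token=' + FINNHUB_KEY)

for ipo in requests.get(url).json().get('ipoCalendar', []):
    print(ipo['date'], ipo['symbol'], human_format(ipo.get('numberOfShares') or 0))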
@@ -462,24 +607,16 @@ def updateForex(api_key, logf):

 def updateNews(api_key, logf):

-    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category=technology'
-    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country=GB'
-    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?lang=en'
-
-
     try:
         f = open('csv/news_settings.json', 'r')
         all_settings = json.load(f)
         f.close()
-
-
-

         if all_settings['use_country']:
             if all_settings['country'] == 'Worldwide':
                 url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
             else:
-                c_dict = {'United States':'US', 'Australia':'AU', 'Canada': 'CA', 'Great Britain':'GB', 'New Zealand':'NZ', 'Ireland':'IE', 'Singapore':'SG', 'South Africa': 'ZA'}
+                c_dict = {'United States':'US', 'Australia':'AU', 'Canada': 'CA', 'Great Britain':'GB', 'New Zealand':'NZ', 'Ireland':'IE', 'Singapore':'SG', 'South Africa': 'ZA', 'Germany': 'DE', 'Hong Kong': 'HK', 'Japan': 'JP', 'South Korea': 'KR', 'China': 'CN', 'France': 'FR', 'India': 'IN', 'Italy': 'IT', 'Switzerland': 'CH', 'Netherlands': 'NL', 'Spain': 'ES', 'Brazil': 'BR', 'Portugal': 'PT'}
                 cc = c_dict[all_settings['country']]
                 url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
         elif all_settings['use_category']:
@@ -494,8 +631,6 @@ def updateNews(api_key, logf):
         #load user settings
         headlines = data[:max_headlines]
         headline_sources = [headline['source'] for headline in headlines]
-
-
         headline_titles = [headline['title'] for headline in headlines]

         headline_times = [headline['publishedAt'] for headline in headlines]
@@ -801,20 +936,111 @@ def updateLeagueEvents(api_key, time, logf):
         for league in all_data.keys():
             ten_or_fifteen = slice(None)
             events = []
-            if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
+            if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU') or (league == 'LIV') or (league == 'F1') or (league == 'NASCAR'):
                 ten_or_fifteen = slice(3)
             else:
                 ten_or_fifteen = slice(None)

+            if league == 'UFC':
+                event = all_data['UFC'][0]
+                events.append(event)
+                if time == 'upcoming':
+                    try:
+                        logo_files = []
+                        for each in all_data['UFC'][0]['fights']:
+                            try:
+                                if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
+                                    urllib.request.urlretrieve(each['fighter1pic'],'logos/ufc/' + each['fighter1pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc/'):
+                                    urllib.request.urlretrieve(each['fighter2pic'],'logos/ufc/' + each['fighter2pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            #country flags
+                            try:
+                                if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
+                                    urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
+                                    urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+
+                        for file in os.listdir('logos/ufc/'):
+                            if file not in logo_files:
+                                os.remove('logos/ufc/'+ file)
+                    except:
+                        pass
+                elif time == 'past':
+                    try:
+                        logo_files = []
+                        for each in all_data['UFC'][0]['fights']:
+                            try:
+                                if each['fighter1pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
+                                    urllib.request.urlretrieve(each['fighter1pic'],'logos/ufc_past/' + each['fighter1pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                if each['fighter2pic'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_past/'):
+                                    urllib.request.urlretrieve(each['fighter2pic'],'logos/ufc_past/' + each['fighter2pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                logo_files.append(each['fighter2pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                logo_files.append(each['fighter1pic'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            #country flags
+                            try:
+                                if each['fighter1country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
+                                    urllib.request.urlretrieve(each['fighter1country'], 'logos/ufc_countries/' + each['fighter1country'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+                            try:
+                                if each['fighter2country'].split('/')[-1].split('&')[0] not in os.listdir('logos/ufc_countries/'):
+                                    urllib.request.urlretrieve(each['fighter2country'], 'logos/ufc_countries/' + each['fighter2country'].split('/')[-1].split('&')[0])
+                            except:
+                                pass
+
+                        for file in os.listdir('logos/ufc_past/'):
+                            if file not in logo_files:
+                                os.remove('logos/ufc_past/'+ file)
+                    except:
+                        pass
+            else:
                 for d in all_data[league][ten_or_fifteen]:
                     event = {}
                     event['date'] = d['dateEvent']
+                    try:
+                        event['date2'] = d['dateEvent2']
+                    except:
+                        pass
+
                     if time == 'live':
                         event['progess'] = d['strProgress']
                         event['status'] = d['strStatus']
                     else:
-                        if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU'):
+                        if (league == 'PGA') or (league == 'LPGA') or (league == 'PGA_EU') or (league == 'LIV') or (league == 'F1') or (league == 'NASCAR'):
                             event['date'] = d['dateEvent']
+                            try:
+                                event['date2'] = d['dateEvent2']
+                            except:
+                                pass
                             event['event'] = d['strEvent'].replace("\u2019","'")
                             event['venue'] = d['strVenue'].replace("\u2019","'")
                             event['city'] = d['strCity'].replace("\u2019","'")
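The UFC branch above is essentially a cache sync for fighter images: download any picture not yet on disk, then delete files that no listed fight references. A compact sketch of that pattern; the folder matches the hunk, while sync_logo_cache and the example URL are hypothetical:

import os
import urllib.request

def sync_logo_cache(urls, folder='logos/ufc/'):
    # Download missing images, then drop files that are no longer referenced.
    os.makedirs(folder, exist_ok=True)
    wanted = []
    for url in urls:
        name = url.split('/')[-1].split('&')[0]    # same filename rule as the hunk
        wanted.append(name)
        if name not in os.listdir(folder):
            try:
                urllib.request.urlretrieve(url, folder + name)
            except Exception:
                pass                               # the hunk swallows download errors too
    for existing in os.listdir(folder):
        if existing not in wanted:
            os.remove(folder + existing)

# sync_logo_cache(['https://example.com/fighters/one.png'])   # hypothetical URL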
@@ -862,6 +1088,60 @@ def updateLeagueEvents(api_key, time, logf):
                                 players.append(result.rstrip())

                             event['golf_standings'] = players


+                        elif (league == 'LIV'):
+                            event['golf_standings'] = d['strResult']
+
+                            rank = ['n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9', 'n10', 'T1', 'T2', 'T3', 'T4', 'T5',
+                                    'T6', 'T7', 'T8', 'T9', 'T10']
+
+                            def convert(string):
+                                string = repr(string).replace('/', '')
+                                li = list(string.split('\\'))
+                                return li
+
+                            try:
+                                str3 = convert(event['golf_standings'].split('--------------------------------------')[0])
+                                strTeams = convert(event['golf_standings'].split('--------------------------------------')[1])
+                            except:
+                                pass
+
+                            players = []
+                            teams = []
+
+                            try:
+                                for each in str3:
+                                    each = each.replace('nT', 'T', 1)
+                                    if each[:2] in rank:
+                                        try:
+                                            first_space = each.find(' ', 1)
+                                            second_space = each.find(' ', 4)
+                                            first_name = each[first_space:second_space].lstrip()
+                                            initial = first_name[0] + '.'
+                                            each = each.replace(first_name,initial)
+                                        except:
+                                            pass
+                                        interator = each.find('-')
+                                        if interator < 0:
+                                            interator = 0
+                                        interator2 = each[interator:interator + 3]
+                                        result = each.split(interator2, 1)[0] + interator2
+                                        players.append(result.rstrip())
+
+                                for each in strTeams:
+                                    each = each.replace('nT', 'T', 1)
+                                    if each[:2] in rank:
+                                        each = each.split('GC')
+                                        score = each[1].rfind(' ')
+                                        score2 = each[1][score:score+4]
+                                        each2 = each[0] + score2
+                                        teams.append(each2)
+                            except:
+                                pass
+
+                            event['golf_standings'] = [players] + [teams]
+
                         else:
                             event['away_score'] = d['intAwayScore']
                             event['home_score'] = d['intHomeScore']
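The convert helper added above relies on repr() turning real newlines into literal backslash escapes, which is why the parser later strips a stray leading 'n' with replace('nT', 'T', 1). A tiny illustration on an invented LIV-style string (only the mechanics match the hunk; the sample data is made up):

def convert(string):
    string = repr(string).replace('/', '')
    li = list(string.split('\\'))
    return li

sample = "T1 Player One -12\nT2 Player Two -10"    # invented sample, not real API data
print(convert(sample))
# -> ["'T1 Player One -12", "nT2 Player Two -10'"]  (every later piece keeps a leading 'n')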
@@ -875,15 +1155,7 @@ def updateLeagueEvents(api_key, time, logf):
         f.close()
     except:
         pass
-        #logf = open('log.txt', "a")
-        #exc_type, exc_obj, exc_tb = sys.exc_info()
-        #fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
-        #logf.write(str(e))
-        #logf.write('. file: ' + fname)
-        #logf.write('. line: ' + str(exc_tb.tb_lineno))
-        #logf.write('. type: ' + str(exc_type))
-        #logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
-        #logf.close()


 def updateSports(api_key, logf):
@@ -967,7 +1239,7 @@ if __name__ == '__main__':



-    update_frequencies = {'stocks':2, 'crypto':5, 'forex':60, 'news':120, 'weather': 120, 'sports': 1440, 'commodities': 15, 'indices': 15, 'movies': 1440} #minutes
+    update_frequencies = {'stocks':2, 'crypto':7, 'forex':60, 'news':120, 'weather': 120, 'sports': 1440, 'commodities': 15, 'indices': 15, 'movies': 1440, 'ipo': 1440, 'prepost': 15} #minutes

     NY_zone = pytz.timezone('America/New_York')
     CET_zone = pytz.timezone('EST')
@@ -1009,6 +1281,10 @@ if __name__ == '__main__':
     except Exception as e:
         movie_key = False

+    try:
+        ipo_key = open('ipo_api_key.txt').readlines()[0]
+    except Exception as e:
+        ipo_key = False

     t = time.time()
     update_processes = []
@@ -1037,13 +1313,52 @@ if __name__ == '__main__':
             f.close()

         except:
-            last_updates = {"stocks": {"time": "06/03/2022 04:12:09", "force": True}, "crypto": {"time": "06/03/2022 04:10:39", "force": True},
+            last_updates = {"scheduler":{"force": False}, "stocks": {"time": "06/03/2022 04:12:09", "force": True}, "crypto": {"time": "06/03/2022 04:10:39", "force": True},
                             "news": {"time": "06/03/2022 04:07:09", "force": True}, "weather": {"time": "06/03/2022 04:08:20", "force": True},
                             "forex": {"time": "06/03/2022 03:54:02", "force": True}, "sports_l": {"time": "06/03/2022 04:10:09", "force": True},
                             "sports_p": {"time": "06/03/2022 04:10:09", "force": True},
-                            "sports_u": {"time": "06/03/2022 04:10:09", "force": True},"sports_t": {"time": "06/03/2022 04:10:09", "force": True}, "commodities": {"time": "06/03/2022 04:10:09", "force": True}, "indices": {"time": "06/03/2022 04:10:09", "force": True}, "movies": {"time": "06/03/2022 04:10:09", "force": True}}
+                            "sports_u": {"time": "06/03/2022 04:10:09", "force": True},"sports_t": {"time": "06/03/2022 04:10:09", "force": True}, "commodities": {"time": "06/03/2022 04:10:09", "force": True}, "indices": {"time": "06/03/2022 04:10:09", "force": True}, "movies": {"time": "06/03/2022 04:10:09", "force": True}, "ipo": {"time": "06/03/2022 04:10:09", "force": True}, "prepost": {"time": "06/03/2022 04:10:09", "force": True}}

+        try:
+            if last_updates['scheduler']['force']:
+                try:
+                    f = open('csv/scheduler.json','r')
+                    schedules = json.load(f)
+                    f.close()
+                    shutdown_schedule_hour = schedules['shutdown']['hour']
+                    shutdown_schedule_minute = schedules['shutdown']['minute']
+
+                    reboot_schedule_hour = schedules['reboot']['hour']
+                    reboot_schedule_minute = schedules['reboot']['minute']
+
+                    timezone = schedules['timezone']
+                    shutdown_enabled = schedules['shutdown']['enabled']
+                    reboot_enabled = schedules['reboot']['enabled']
+                except:
+                    shutdown_schedule_hour = "00"
+                    shutdown_schedule_minute = "00"
+
+                    reboot_schedule_hour = "00"
+                    reboot_schedule_minute = "00"
+
+                    timezone = "GMT"
+                    shutdown_enabled = False
+                    reboot_enabled = False
+                last_updates['scheduler']['force'] = False
+        except:
+            pass
+        #SHUTDOWN
+        try:
+            if datetime.now(pytz.timezone(timezone)).strftime("%H:%M") == shutdown_schedule_hour+':'+shutdown_schedule_minute and shutdown_enabled:
+                os.system('sudo shutdown now')
+        except:
+            pass
+        #REBOOT
+        try:
+            if datetime.now(pytz.timezone(timezone)).strftime("%H:%M") == reboot_schedule_hour+':'+reboot_schedule_minute and reboot_enabled:
+                os.system('sudo reboot')
+        except:
+            pass


         NY_time = datetime.now(NY_zone).replace(tzinfo=None)
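The scheduler check added above reduces to comparing the current wall-clock time in the configured timezone against an HH:MM string once per loop pass. A standalone sketch of that comparison, with the os.system call replaced by a print so it is safe to run (schedule_due and the example values are not from the diff):

from datetime import datetime
import pytz

def schedule_due(timezone, hour, minute, enabled):
    # True when the configured HH:MM matches the current minute in that timezone.
    now = datetime.now(pytz.timezone(timezone)).strftime("%H:%M")
    return enabled and now == hour + ':' + minute

if schedule_due("GMT", "03", "30", True):          # example values
    print("would run: sudo shutdown now")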
@@ -1073,6 +1388,33 @@ if __name__ == '__main__':
             update_process.start()
             update_processes.append(update_process)

+        NY_time1 = datetime.now(NY_zone).replace(tzinfo=None)
+        NY_time2 = datetime.now(NY_zone).replace(tzinfo=None)
+        #prepost
+
+        preopen = NY_time1.replace(hour=4, minute=0, second=0, microsecond=0).replace(tzinfo=None)
+        preclose = NY_time1.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
+
+        postopen = NY_time2.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)
+        postclose = NY_time2.replace(hour=20, minute=20, second=0, microsecond=0).replace(tzinfo=None)
+
+        prepost_frequency = update_frequencies['prepost']
+        prepost_time = datetime.strptime(last_updates['prepost']['time'], "%d/%m/%Y %H:%M:%S")
+
+        pre_open = preopen < NY_time1 < preclose and NY_time1.weekday() <= 4
+        post_open = postopen < NY_time2 < postclose and NY_time2.weekday() <= 4
+
+        diff1 = (NY_time1 - prepost_time).total_seconds()/60 #minutes
+        diff2 = (NY_time2 - prepost_time).total_seconds()/60 #minutes
+
+        if (last_updates['prepost']['force']) or (diff1 >= update_frequencies['prepost'] and pre_open) or (diff2 >= update_frequencies['prepost'] and post_open):
+            prepost_time = NY_time1.strftime("%d/%m/%Y %H:%M:%S")
+            last_updates['prepost']['time'] = prepost_time
+            last_updates['prepost']['force'] = False
+            update_process = Process(target = updateStocksPrePost, args = (api_key,logf))
+            update_process.start()
+            update_processes.append(update_process)
+
         # crypto
         crypto_time = datetime.strptime(last_updates['crypto']['time'], "%d/%m/%Y %H:%M:%S")

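The gating added above only refreshes extended-hours quotes on weekdays inside the 04:00-09:30 and 16:00-20:20 New York windows, and only when the configured number of minutes has elapsed. A small sketch of that test with the same boundaries (the function names are stand-ins):

from datetime import datetime
import pytz

def prepost_window_open():
    # Weekdays only (Mon=0 .. Fri=4), inside either extended-hours window, NY time.
    ny = datetime.now(pytz.timezone('America/New_York')).replace(tzinfo=None)
    pre = ny.replace(hour=4, minute=0, second=0, microsecond=0) < ny < ny.replace(hour=9, minute=30, second=0, microsecond=0)
    post = ny.replace(hour=16, minute=0, second=0, microsecond=0) < ny < ny.replace(hour=20, minute=20, second=0, microsecond=0)
    return ny.weekday() <= 4 and (pre or post)

def minutes_since(last_time_str):
    # Same timestamp format as last_updates.json.
    last = datetime.strptime(last_time_str, "%d/%m/%Y %H:%M:%S")
    now = datetime.now(pytz.timezone('America/New_York')).replace(tzinfo=None)
    return (now - last).total_seconds() / 60

# refresh = minutes_since("14/06/2022 06:42:06") >= 15 and prepost_window_open()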
@@ -1144,6 +1486,24 @@ if __name__ == '__main__':
             update_process.start()
             update_processes.append(update_process)


+        # ipos
+        ipo_time = datetime.strptime(last_updates['ipo']['time'], "%d/%m/%Y %H:%M:%S")
+
+        NY_time = datetime.now(NY_zone).replace(tzinfo=None)
+        diff = (NY_time - ipo_time).total_seconds()/60 #minutes
+
+        if last_updates['ipo']['force'] or diff >= update_frequencies['ipo']:
+            ipo_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
+            last_updates['ipo']['time'] = ipo_time
+            last_updates['ipo']['force'] = False
+            update_process = Process(target = updateIpo, args = (ipo_key,logf))
+            update_process.start()
+            update_processes.append(update_process)
+
         # weather
         weather_time = datetime.strptime(last_updates['weather']['time'], "%d/%m/%Y %H:%M:%S")

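Each feed follows the same dispatch pattern the IPO block uses: compute the minutes since the stored timestamp, and when the force flag is set or the interval has elapsed, stamp the new time and start the updater in its own multiprocessing.Process. A generic sketch of that loop body (maybe_dispatch, update_fn and the arguments are stand-ins, not names from the repo):

from datetime import datetime
from multiprocessing import Process
import pytz

def maybe_dispatch(last_updates, key, frequencies, update_fn, api_key, logf=None):
    # Generic form of the per-feed blocks above (ipo, crypto, weather, ...).
    ny_now = datetime.now(pytz.timezone('America/New_York')).replace(tzinfo=None)
    last = datetime.strptime(last_updates[key]['time'], "%d/%m/%Y %H:%M:%S")
    diff = (ny_now - last).total_seconds() / 60            # minutes
    if last_updates[key]['force'] or diff >= frequencies[key]:
        last_updates[key]['time'] = ny_now.strftime("%d/%m/%Y %H:%M:%S")
        last_updates[key]['force'] = False
        p = Process(target=update_fn, args=(api_key, logf))
        p.start()
        return p
    return None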
BIN  feature_titles/ipo.png (new file, 332 B)
BIN  feature_titles/small_feature_titles/ipo.png (new file, 274 B)
BIN  logos/commodities/CATTLE(lb).png (new file, 1.8 KiB)
BIN  logos/commodities/HOG(lb).png (new file, 2.3 KiB)
BIN  logos/down-tiny.png (new file, 139 B)
BIN  logos/news_logos/69news-wfmz-tv.png (new file, 6.4 KiB)
BIN  logos/news_logos/8-days.png (new file, 791 B)
BIN  logos/news_logos/brookings-institution.png (new file, 3.4 KiB)
BIN  logos/news_logos/c-span-.png (new file, 2.2 KiB)
BIN  logos/news_logos/carnegie-europe.png (new file, 1.3 KiB)
BIN  logos/news_logos/cheddar.png (new file, 2.2 KiB)
BIN  logos/news_logos/coindesk.png (new file, 3.4 KiB)
BIN  logos/news_logos/dentons.png (new file, 2.7 KiB)
BIN  logos/news_logos/dutchnews.nl.png (new file, 4.0 KiB)
BIN  logos/news_logos/euractiv.png (new file, 2.6 KiB)
BIN  logos/news_logos/eurasian-times.png (new file, 749 B)
BIN  logos/news_logos/european-commission.png (new file, 1.4 KiB)
BIN  logos/news_logos/foreign-policy.png (new file, 931 B)
BIN  logos/news_logos/gulf-news.png (new file, 1.8 KiB)
BIN  logos/news_logos/japan-today.png (new file, 4.1 KiB)
BIN  logos/news_logos/kcrw.png (new file, 1.2 KiB)
BIN  logos/news_logos/latestly.png (new file, 1.4 KiB)
BIN  logos/news_logos/markets-insider.png (new file, 2.2 KiB)
BIN  logos/news_logos/marketscreener.com.png (new file, 531 B)
BIN  logos/news_logos/mingtiandi.png (new file, 1.1 KiB)
BIN  logos/news_logos/mint.png (new file, 1.2 KiB)
BIN  (unnamed logos/news_logos/ image, modified: 2.1 KiB -> 3.2 KiB)
BIN  logos/news_logos/news-12-bronx.png (new file, 4.8 KiB)
BIN  logos/news_logos/nikkei-asia.png (new file, 2.8 KiB)
BIN  logos/news_logos/oecd.png (new file, 3.8 KiB)
BIN  logos/news_logos/pbs-newshour.png (new file, 2.0 KiB)
BIN  logos/news_logos/pew-research-center.png (new file, 1.4 KiB)
BIN  logos/news_logos/portugal-resident.png (new file, 4.7 KiB)
BIN  logos/news_logos/rfi-english.png (new file, 482 B)
BIN  logos/news_logos/rthk.png (new file, 2.4 KiB)
BIN  logos/news_logos/smithsonian-magazine.png (new file, 1.4 KiB)
BIN  logos/news_logos/the-atlantic.png (new file, 871 B)
BIN  logos/news_logos/the-diplomat.png (new file, 596 B)
BIN  logos/news_logos/the-hindu.png (new file, 1.3 KiB)
BIN  logos/news_logos/the-intercept.png (new file, 1.7 KiB)
BIN  logos/news_logos/the-local-france.png (new file, 1.1 KiB)
BIN  logos/news_logos/the-local-italy.png (new file, 1.0 KiB)
BIN  logos/news_logos/the-netherlands-and-you.png (new file, 992 B)
BIN  logos/news_logos/the-portugal-news.png (new file, 3.1 KiB)
BIN  logos/news_logos/the-wire.png (new file, 1.4 KiB)
BIN  logos/news_logos/theprint.png (new file, 2.9 KiB)
BIN  logos/news_logos/time-out.png (new file, 2.2 KiB)
BIN  logos/news_logos/trt-world.png (new file, 6.4 KiB)
BIN  logos/news_logos/u.s.-news-&-world-report.png (new file, 3.3 KiB)
BIN  logos/news_logos/voa-asia.png (new file, 1.7 KiB)
BIN  logos/news_logos/voice-of-america---voa-news.png (new file, 1.7 KiB)
BIN  logos/sports/league_logos/F1.png (new file, 739 B)
BIN  logos/sports/league_logos/LIV.png (new file, 7.9 KiB)
BIN  logos/sports/league_logos/NASCAR.png (new file, 4.7 KiB)
BIN  logos/sports/league_logos/UFC.png (new file, 1.3 KiB)
BIN  logos/tiny_news/69news-wfmz-tv.png (new file, 2.0 KiB)
BIN  logos/tiny_news/8-days.png (new file, 435 B)
BIN  logos/tiny_news/brookings-institution.png (new file, 1.6 KiB)
BIN  logos/tiny_news/c-span-.png (new file, 1.0 KiB)
BIN  logos/tiny_news/carnegie-europe.png (new file, 502 B)
BIN  logos/tiny_news/cheddar.png (new file, 1.3 KiB)
BIN  logos/tiny_news/coindesk.png (new file, 1.6 KiB)
BIN  logos/tiny_news/dentons.png (new file, 1.6 KiB)
BIN  logos/tiny_news/dutchnews.nl.png (new file, 1.8 KiB)
BIN  logos/tiny_news/euractiv.png (new file, 1.4 KiB)
BIN  logos/tiny_news/eurasian-times.png (new file, 535 B)
BIN  logos/tiny_news/european-commission.png (new file, 530 B)
BIN  logos/tiny_news/foreign-policy.png (new file, 502 B)
BIN  logos/tiny_news/gulf-news.png (new file, 736 B)
BIN  logos/tiny_news/japan-today.png (new file, 1.9 KiB)
BIN  logos/tiny_news/kcrw.png (new file, 770 B)
BIN  logos/tiny_news/latestly.png (new file, 1.1 KiB)
BIN  logos/tiny_news/markets-insider.png (new file, 1.1 KiB)
BIN  logos/tiny_news/marketscreener.com.png (new file, 341 B)
BIN  logos/tiny_news/mingtiandi.png (new file, 558 B)
BIN  logos/tiny_news/mint.png (new file, 818 B)
BIN  (unnamed logos/tiny_news/ image, modified: 1.3 KiB -> 1.9 KiB)
BIN  logos/tiny_news/news-12-bronx.png (new file, 1.5 KiB)
BIN  logos/tiny_news/nikkei-asia.png (new file, 1.3 KiB)
BIN  logos/tiny_news/oecd.png (new file, 1.5 KiB)
BIN  logos/tiny_news/pbs-newshour.png (new file, 1.2 KiB)
BIN  logos/tiny_news/pew-research-center.png (new file, 653 B)
BIN  logos/tiny_news/portugal-resident.png (new file, 1.8 KiB)
BIN  logos/tiny_news/rfi-english.png (new file, 347 B)
BIN  logos/tiny_news/rthk.png (new file, 1.4 KiB)
BIN  logos/tiny_news/smithsonian-magazine.png (new file, 636 B)
BIN  logos/tiny_news/the-atlantic.png (new file, 436 B)
BIN  logos/tiny_news/the-diplomat.png (new file, 395 B)
BIN  logos/tiny_news/the-hindu.png (new file, 620 B)
BIN  logos/tiny_news/the-intercept.png (new file, 961 B)
BIN  logos/tiny_news/the-local-france.png (new file, 594 B)
BIN  logos/tiny_news/the-local-italy.png (new file, 591 B)
BIN  logos/tiny_news/the-netherlands-and-you.png (new file, 540 B)
BIN  logos/tiny_news/the-portugal-news.png (new file, 1.4 KiB)
BIN  logos/tiny_news/the-wire.png (new file, 915 B)
BIN  logos/tiny_news/theprint.png (new file, 1.4 KiB)
BIN  logos/tiny_news/time-out.png (new file, 994 B)