# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code cannot be copied and/or distributed without the express
# permission of Fintic

import finnhub
import time
import csv
import pytz
from datetime import datetime, timedelta
import json
import datetime as dt
import sys, os, base64, hashlib, hmac, select
import requests
from pycoingecko import CoinGeckoAPI
from newsapi import NewsApiClient
import traceback 
from geopy import geocoders 
from multiprocessing import Process

def getInput(Block=False):
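    """Return one character from stdin if input is waiting (or when Block is True); otherwise return ''."""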
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        msg = sys.stdin.read(1)
        #sys.stdin.flush()
    else:
        msg = ''
    return msg

def emptyInfo(symbols, stock_info):
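    """Return True if any symbol in stock_info still holds the placeholder value -1 (no data fetched yet)."""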
    update = False
    for symbol in symbols:
        if stock_info[symbol] == -1: # stock with no info
            update = True
    return update
    
def updateUpdate(NY_time):
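    """Write the given New York time to csv/last_update.csv as a DD/MM/YYYY HH:MM:SS timestamp."""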
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    f = open('csv/last_update.csv', 'w+')
    f.write(NY_str + '\n')
    f.close()



def updateStocks(api_key, logf):
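    """Fetch current prices for the symbols in csv/stocks_settings.json and write the results back to that file. Errors are appended to log.txt."""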
   

    try:
       

        max_stocks = 200

        f = open('csv/stocks_settings.json', 'r')
        all_stocks_settings = json.load(f)
        f.close()
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())
     
    
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
        
        url += ','.join(symbols)  # comma-separated symbol list, no trailing comma (matches the crypto/forex builders)
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
       
            
        stock_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for stock in data:
                    if stock['symbol'] == symbol:
                        stock_info[stock['symbol']] = {'current': stock['price'], 'change': stock['change_since'], 'percent_change':stock['percent']}
                    
                    
            all_stocks_settings['symbols'] = stock_info
                
           
            f = open('csv/stocks_settings.json', 'w+')
            json.dump(all_stocks_settings, f)
            f.close()
        
    except Exception as e:
    
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()
        

def updateCrypto(api_key, logf):
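    """Fetch current prices for the coin,base pairs in csv/crypto_settings.json and write the results back to that file."""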
 


    try:
        
        
        f = open('csv/crypto_settings.json', 'r')
        all_crypto_settings = json.load(f)
        f.close()
        
        coin_info = all_crypto_settings['symbols']
        symbol_base = list(coin_info.keys())
        
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))
        
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols='
        
        for i,s in enumerate(symbols):
            url += bases[i] + '-' + s + ','
        url = url[:-1] #remove last comma
        url += '&apiKey=' + api_key
   
        response = requests.get(url)
        data = response.json()
    
    
        
        coin_info = {}
        if len(data) > 0:
            for sb in symbol_base:
                for i,d in enumerate(data): 
                    
                    symbol = d['symbol']
                    base = d['currency']
                    
                    if symbol.upper() + ',' + base.upper() == sb:
                    
                        coin_info[symbol.upper() + ',' + base.upper()] = {'current': d['price'], '24hr_change': d['price_over_24hr'], 'percent_change': d['percent_over_24hr']}
                   
            all_crypto_settings['symbols'] = coin_info
            f = open('csv/crypto_settings.json', 'w+')
            json.dump(all_crypto_settings, f)
            f.close()
        
    except Exception as e:
        
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()

def updateForex(api_key, logf):
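    """Fetch current rates for the currency,base pairs in csv/forex_settings.json and write the results back to that file."""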
    

    
    try:
        f = open('csv/forex_settings.json', 'r')
        all_forex_settings = json.load(f)
        f.close()
        
        forex_info = all_forex_settings['symbols']
        symbol_base = list(forex_info.keys())
        
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))
        
        
        targets = ','.join(symbols)
        
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?symbols='
        
        
        for i,s in enumerate(symbols):
            url += s + '-' + bases[i] + ','
        url = url[:-1] #remove last comma
        url += '&apiKey=' + api_key
            
        response = requests.get(url)
        data = response.json()
        
      
        if len(data) > 0:

            c_dict = {}
            for sb in symbol_base:
                for d in data:
                    if d['uid'].replace('/',',') == sb:
                        c_dict[d['uid'].replace('/',',')] = {'current': d['rate'], '24hr_change': d['rate_over_24hr'], 'percent_change':d['percent_over_24hr']}
            
         
            
            
            all_forex_settings['symbols'] = c_dict
            
            f = open( "csv/forex_settings.json", 'w+' )
            json.dump(all_forex_settings,  f)
            f.close()
    except Exception as e:
  
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()


def updateNews(api_key, logf):
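    """Fetch headlines filtered by country or category per csv/news_settings.json and store up to num_headlines (title, source, publishedAt) entries back in that file."""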
    
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category=technology'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country=GB'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?lang=en'
    
    
    try:
        f = open('csv/news_settings.json', 'r')
        all_settings = json.load(f)
        f.close()
        
        
        
        
        if all_settings['use_country']:
            if all_settings['country'] == 'Worldwide':
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
            else:
                c_dict = {'United States':'US', 'Australia':'AU', 'Canada': 'CA', 'Great Britain':'GB', 'New Zealand':'NZ', 'Ireland':'IE', 'Singapore':'SG', 'South Africa': 'ZA'}
                cc = c_dict[all_settings['country']]
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
        elif all_settings['use_category']:
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category={}'.format(all_settings['category'])
        else:
            # neither filter is enabled; assume the worldwide feed as a fallback so url is always defined
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide?'
        
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
   
        if len(data) > 0:
            max_headlines = int(all_settings['num_headlines'])
            #load user settings
            headlines = data[:max_headlines]
            headline_sources = [headline['source'] for headline in headlines]
            
                
            headline_titles = [headline['title'] for headline in headlines]
            
            headline_times = [headline['publishedAt'] for headline in headlines]
            
            headlines = list(zip(headline_titles, headline_sources, headline_times))
    
            all_settings['headlines'] = headlines
            
            f = open('csv/news_settings.json', 'w+')
            json.dump(all_settings, f)
            f.close()
        
            
        
    except Exception as e:
        
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()

    
def updateWeather(api_key, logf):
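    """Geocode each saved location, query the OpenWeatherMap One Call API, and write current conditions to csv/current_weather.json and the daily forecast to csv/daily_weather.json."""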

    max_cities = 30
    
    try:
        gn = geocoders.GeoNames(username='fintic')
        
        f = open('csv/daily_weather.json', 'r')
        all_daily_settings = json.load(f)
        f.close()
        
        f = open('csv/current_weather.json', 'r')
        all_current_settings = json.load(f)
        f.close()
        
        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())
        
        all_locations = list(set(current_locations + daily_locations))
        
       
        
        
        current_weathers = {}
        daily_weathers = {}
        
        
        
        for location in all_locations:
            loc = gn.geocode(location)
            current_weather = {}
            
            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)
           
            weather = r.json()['current']
            
            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = r.json()['daily'][0]['temp']['min']
            current_weather['max_temp'] = r.json()['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] =  r.json()['hourly'][0]['pop']
            
            
            if location in current_locations:
                current_weathers[location] = current_weather
            
            daily_weather = []
            daily = r.json()['daily']
            
            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)
                
            # add relevant current information to the first day in daily
            daily_weather[0]['temp'] = weather['temp']
            daily_weather[0]['rain_chance'] = current_weather['rain_chance']
            daily_weather[0]['humidity'] = current_weather['humidity']
            daily_weather[0]['wind_speed'] = current_weather['wind_speed']
            daily_weather[0]['uv'] = current_weather['uv']
            daily_weather[0]['clouds'] = current_weather['clouds']
            daily_weather[0]['wind_direction'] = current_weather['wind_direction']
            daily_weather[0]['visibility'] = current_weather['visibility']
            
            if location in daily_locations:
                daily_weathers[location] = daily_weather

            
        
        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers
        f = open( "csv/current_weather.json", 'w+' )
        json.dump( all_current_settings,  f)
        f.close()
        
        f = open( "csv/daily_weather.json", 'w+' )
        json.dump( all_daily_settings,  f)
        f.close()
    
    except Exception as e:
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))

        logf.close()

        
def updateLeagueTables(api_key, logf):
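    """Fetch standings for the leagues listed in csv/league_tables.json and write the team records back to that file."""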

    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?stats='
    try:
        f = open('csv/league_tables.json', 'r')
        all_settings = json.load(f)

        f.close()
        
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
        
        for league in leagues:
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
            
        url = url[:-1] # remove last comma 
        url += '&apiKey=' + api_key
        r = requests.get(url)
          
        all_data = r.json()
        
        

        for i,l in enumerate(all_data):
   
            league = list(l.keys())[0]
        

            teams = []
            
            
            for d in all_data[i][league]:
                team = {}
                
               
                    
                team['name'] = d['strTeam']
                team['wins'] = d['intWin']
                team['loss'] = d['intLoss']
                team['draw'] = d['intDraw']
                #team['played'] = d['intPlayed']
                team['standing'] = d['intRank']
                #team['points'] = d['intPoints']
                
                teams.append(team)
            leagues_info[league.upper()] = teams
        
        all_settings['leagues'] = leagues_info
        f = open( "csv/league_tables.json".format(league), 'w+' )
        json.dump(all_settings,  f)
        
        f.close()
    except Exception as e:
        
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()

    
def updateLeagueEvents(api_key, time, logf):
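    """Fetch fixtures for the leagues in the matching csv/<time>_games.json file, where time is 'past', 'upcoming' or 'livescore', and write the events back to that file."""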

    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?{}='.format(time)
    
    if time == 'past':
        
        f = open('csv/past_games.json')
    elif time == 'upcoming':
       
        f = open('csv/upcoming_games.json')
    elif time == 'livescore':
        f = open('csv/live_games.json')
    
    
        
    
    try:
        all_settings = json.load(f)

        f.close()
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
       
        for league in leagues:
            
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
        url = url[:-1] # remove last comma 
        url += '&apiKey=' + api_key
        
        r = requests.get(url)
        
        all_data = r.json()
        
        for league in all_data.keys():
            events = []
            for d in all_data[league]:
                event = {}
                event['date'] = d['dateEvent']
                
                if time == 'livescore':
                   
                    event['progess'] = d['strProgress']
                    event['status'] = d['strStatus']
                else:
                    
                    event['round'] = d['intRound']
                event['time'] = d['strTime']
                event['home_team'] = d['strHomeTeam']
                
                event['away_team'] = d['strAwayTeam']
                
                if time != 'upcoming':
                    event['away_score'] = d['intAwayScore']
                    event['home_score'] = d['intHomeScore']
                
                events.append(event)
            leagues_info[league.upper()] = events
        all_settings['leagues'] = leagues_info

        f = open( "csv/{}_games.json".format(time), 'w+' )
        json.dump(all_settings,  f)
        f.close()
    except Exception as e:
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()
      

def updateSports(api_key, logf):
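    """Refresh league tables plus live, past, and upcoming fixtures for the configured leagues."""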
    #read user settings to decide which sports to update
   
    updateLeagueTables(api_key, logf)
    
    updateLeagueEvents(api_key,'livescore', logf) 
    updateLeagueEvents(api_key,'past', logf) 
    updateLeagueEvents(api_key,'upcoming', logf) 




def checkStocks(last_update, update_frequency):
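    """Return True if stock data should be refreshed.

    Refresh every update_frequency minutes during US market hours (09:30-16:05 NY time on weekdays),
    whenever a symbol has no data yet, or when the last update predates the previous day's close.
    Relies on the module-level NY_zone defined in the __main__ block.
    """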
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
    
    
    f = open('csv/stocks_settings.json', 'r')
    all_stocks_settings = json.load(f)
    f.close()
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())
    
    updated = False

    diff = (NY_time - last_update).total_seconds()/60 #minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5: # we need to do real time updating
        
        
        if diff >= update_frequency:
            updated = True
            
        
        
    elif emptyInfo(symbols, stock_info): # if theres any empty stocks
        updated = True
        
        
    
    else:
        # update if last update was before the previous days closing
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
        
        if last_update < yday_closing:
            updated = True
            
            
    return updated


def updateAll(api_key, weather_key, logf):
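    """Run every updater once; weather is only updated when a weather API key is available."""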
    updateStocks(api_key, logf)

    updateCrypto(api_key, logf)

    updateForex(api_key, logf)

    updateNews(api_key, logf)
 
    updateSports(api_key, logf)
    
    if weather_key:
        updateWeather(weather_key, logf)
    
            
if __name__ == '__main__':
    logf = open("log.txt", "a")
    
    t = time.time()
    
    
    
    

    update_frequencies = {'stocks':2, 'crypto':5, 'forex':60, 'news':120, 'weather': 120, 'sports': 1440} #minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('EST')
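    # note: despite the CET_ naming, pytz 'EST' is a fixed UTC-5 zone (no DST offset)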

    NY_time = datetime.now(NY_zone)

    CET_time = datetime.now(CET_zone)
    
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    CET_str = CET_time.strftime("%d/%m/%Y %H:%M:%S")
    
    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()
    
    f = open('api_keys.txt')
    
    api_keys = f.readlines()
    api_key = api_keys[0].strip()
    

    try:
        weather_key = api_keys[1].strip()
    except Exception as e:
        weather_key = False
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()
    
    
    
    
    t = time.time()
    update_processes = []
    
    
    
    
    
    try:
        while True:
            
            try:
                f = open('csv/last_updates.json', 'r')
                last_updates = json.load(f)
                f.close()
                
            except:
                last_updates = {"stocks": {"time": "06/03/2022 04:12:09", "force": True}, "crypto": {"time": "06/03/2022 04:10:39", "force": True}, 
                "news": {"time": "06/03/2022 04:07:09", "force": True}, "weather": {"time": "06/03/2022 04:08:20", "force": True}, 
                "forex": {"time": "06/03/2022 03:54:02", "force": True}, "sports_l": {"time": "06/03/2022 04:10:09", "force": True}, 
                "sports_p": {"time": "06/03/2022 04:10:09", "force": True},
                "sports_u": {"time": "06/03/2022 04:10:09", "force": True},"sports_t": {"time": "06/03/2022 04:10:09", "force": True}}

            
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
         
            #msg = getInput()
            
                 
              
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            #stocks
            
            stock_time = datetime.strptime(last_updates['stocks']['time'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            diff = (NY_time - stock_time).total_seconds()/60 #minutes
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
            closing = NY_time.replace(hour=16, minute=5, second=0, microsecond=0).replace(tzinfo=None)
            stock_open = opening < NY_time < closing and datetime.today().weekday() <= 4 
            
            if last_updates['stocks']['force'] or (diff >= update_frequencies['stocks'] and stock_open):# or msg == 's':
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks']['time'] = stock_time
                last_updates['stocks']['force'] = False
                #updateStocks(api_key)
                update_process = Process(target = updateStocks, args = (api_key,logf))
                update_process.start()
                update_processes.append(update_process)
                
            # crypto
            crypto_time = datetime.strptime(last_updates['crypto']['time'], "%d/%m/%Y %H:%M:%S")
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds()/60 #minutes
           
            
            if last_updates['crypto']['force'] or diff >= update_frequencies['crypto']:# or msg == 'c':
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateCrypto(api_key, logf)
               
                
                last_updates['crypto']['time'] = crypto_time
                last_updates['crypto']['force'] = False
                update_process = Process(target = updateCrypto, args = (api_key,logf))
                update_process.start()
                update_processes.append(update_process)
                    
                    
            # weather
            weather_time = datetime.strptime(last_updates['weather']['time'], "%d/%m/%Y %H:%M:%S")
        
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds()/60 #minutes
            if last_updates['weather']['force'] or diff >= update_frequencies['weather']:# or msg == 'w':
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                
                #updateWeather(weather_key)
                last_updates['weather']['time'] = weather_time
                last_updates['weather']['force'] = False
                update_process = Process(target = updateWeather, args = (weather_key,logf))
                update_process.start()
                update_processes.append(update_process)
                
                
            # news
            news_time = datetime.strptime(last_updates['news']['time'], "%d/%m/%Y %H:%M:%S")
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds()/60 #minutes
            if last_updates['news']['force'] or diff >= update_frequencies['news']:# or msg == 'n':
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateNews(api_key)
                last_updates['news']['time'] = news_time
                last_updates['news']['force'] = False
                update_process = Process(target = updateNews, args = (api_key,logf))
                update_process.start()
                update_processes.append(update_process)
            
            
            
             
    
   
            # sports upcoming
            sports_time = datetime.strptime(last_updates['sports_u']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if last_updates['sports_u']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_u']['time'] = sports_time
                last_updates['sports_u']['force'] = False
                update_process = Process(target = updateLeagueEvents, args = (api_key,'upcoming',logf))
                update_process.start()
                update_processes.append(update_process)
            
            # sports live
            sports_time = datetime.strptime(last_updates['sports_l']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if last_updates['sports_l']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_l']['time'] = sports_time
                last_updates['sports_l']['force'] = False
                update_process = Process(target = updateLeagueEvents, args = (api_key, 'livescore',logf))
                update_process.start()
                update_processes.append(update_process)
            
            # sports past
            sports_time = datetime.strptime(last_updates['sports_p']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if last_updates['sports_p']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_p']['time'] = sports_time
                last_updates['sports_p']['force'] = False
                update_process = Process(target = updateLeagueEvents, args = (api_key,'past',logf))
                update_process.start()
                update_processes.append(update_process)
            
            # sports table
            sports_time = datetime.strptime(last_updates['sports_t']['time'], "%d/%m/%Y %H:%M:%S")
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if last_updates['sports_t']['force'] or diff >= update_frequencies['sports']:# or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                #updateSports(api_key)
                last_updates['sports_t']['time'] = sports_time
                last_updates['sports_t']['force'] = False
                update_process = Process(target = updateLeagueTables, args = (api_key,logf))
                update_process.start()
                update_processes.append(update_process)
            
            
                
            #forex updates once every 24hours at 1700 CET
            
            # update if last update was before the previous days closing
            forex_time = datetime.strptime(last_updates['forex']['time'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone).replace(tzinfo=None)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
            diff = (CET_time.replace(tzinfo=None) - forex_time).total_seconds()/60
            
            opening = CET_time.replace(hour=17, minute=0, second=0, microsecond=0).replace(tzinfo=None)
            
            #forex updates between 5pm sunday and 5pm friday every hour
            forex_open = datetime.today().weekday() < 4 or (datetime.today().weekday() == 6 and CET_time > opening) or (datetime.today().weekday() == 4 and CET_time < opening)
                
            if last_updates['forex']['force'] or (diff >= update_frequencies['forex'] and forex_open):# or msg == 'f':
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex']['time'] = forex_time  
                last_updates['forex']['force'] = False
                #updateForex(api_key)
                update_process = Process(target = updateForex, args = (api_key,logf))
                update_process.start()
                update_processes.append(update_process)
                
            f = open('csv/last_updates.json', 'w+')
            json.dump(last_updates, f)
            f.close()
            # iterate over a copy so finished processes can be removed safely while looping
            for process in list(update_processes):
                if not process.is_alive():
                    process.join()
                    update_processes.remove(process)
        
            time.sleep(10)
       
                
    except Exception as e: 
       
      
        logf = open('log.txt', "a")
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        logf.close()