# Copyright (C) 2020 Fintic, finticofficial@gmail.com
#
# This file is part of Fintic project, developed by Neythen Treloar and Justin Dunn
#
# This code can not be copied and/or distributed without the express
# permission of Fintic

import time
import pytz
from datetime import datetime
import json
import datetime as dt
import sys, os, select
import requests
import traceback
from geopy import geocoders

def getInput(Block=False):
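    """Non-blocking read of a single character from stdin.

    Returns the character if one is available (or waits for one when Block=True),
    otherwise returns an empty string.
    """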
    if Block or select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], []):
        msg = sys.stdin.read(1)
        #sys.stdin.flush()
    else:
        msg = ''
    return msg
    


def emptyInfo(symbols, stock_info):
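    """Return True if any of the given symbols has no cached data (entry == -1)."""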
    update = False
    for symbol in symbols:
        if stock_info[symbol] == -1: # stock with no info
            update = True
    return update
    
def updateUpdate(NY_time):
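    """Write the given New York time to csv/last_update.csv as the last-update timestamp."""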
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    f = open('csv/last_update.csv', 'w+')
    f.write(NY_str + '\n')
    f.close()

def updateStocks(api_key, logf):
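    """Fetch current prices for the configured stock symbols and rewrite csv/stocks_settings.json.

    Each symbol is stored as {'current', 'change', 'percent_change'}, taken from the
    ScriptsAPI stocks endpoint. Failures are logged to logf.
    """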
    try:
        max_stocks = 200

        f = open('csv/stocks_settings.json', 'r')
        all_stocks_settings = json.load(f)
        f.close()
        stock_info = all_stocks_settings['symbols']
        symbols = list(stock_info.keys())
        print(symbols)
    
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/stocks?symbols='
        
        url += ','.join(symbols)
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
        print(response)
        print(dir(response))
        print(len(data))
            
        stock_info = {}
        if len(data) > 0:
            for symbol in symbols:
                for stock in data:
                    if stock['symbol'] == symbol:
                        stock_info[stock['symbol']] = {'current': stock['price'], 'change': stock['change_since'], 'percent_change':stock['percent']}
                    
                    
            all_stocks_settings['symbols'] = stock_info

            with open('csv/stocks_settings.json', 'w+') as f:
                json.dump(all_stocks_settings, f)
        
    except Exception as e:
    
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
        

def updateCrypto(api_key, logf):
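    """Fetch current prices for the configured coins and rewrite csv/crypto_settings.json.

    Coins are keyed as 'SYMBOL,BASE' and stored as {'current', '24hr_change',
    'percent_change'}, taken from the ScriptsAPI crypto endpoint. Failures are logged to logf.
    """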
    # coin_info['SYMBOL,BASE'].keys() = ['current', '24hr_change', 'percent_change']
    try:
        f = open('csv/crypto_settings.json', 'r')
        all_crypto_settings = json.load(f)
        f.close()
        
        coin_info = all_crypto_settings['symbols']
        symbol_base = list(coin_info.keys())
        
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))
        
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/crypto?symbols='
        
        for i,s in enumerate(symbols):
            url += bases[i] + '-' + s + ','
        url = url[:-1] #remove last comma
        url += '&apiKey=' + api_key
        print(url)
        response = requests.get(url)
        data = response.json()
        print(data)
    
        
        coin_info = {}
        if len(data) > 0:
            for sb in symbol_base:
                for i,d in enumerate(data): 
                    
                    symbol = d['symbol']
                    base = d['currency']
                    
                    if symbol.upper() + ',' + base.upper() == sb:
                    
                        coin_info[symbol.upper() + ',' + base.upper()] = {'current': d['price'], '24hr_change': d['price_over_24hr'], 'percent_change': d['percent_over_24hr']}
                   
            all_crypto_settings['symbols'] = coin_info
            f = open('csv/crypto_settings.json', 'w+')
            json.dump(all_crypto_settings, f)
            f.close()
        
    except Exception as e:
        
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))

def updateForex(api_key, logf):
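    """Fetch exchange rates for the configured currency pairs and rewrite csv/forex_settings.json.

    Pairs are keyed as 'SYMBOL,BASE' and stored as {'current', '24hr_change',
    'percent_change'}, taken from the ScriptsAPI forex endpoint. Failures are logged to logf.
    """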
    try:
        f = open('csv/forex_settings.json', 'r')
        all_forex_settings = json.load(f)
        f.close()
        
        forex_info = all_forex_settings['symbols']
        symbol_base = list(forex_info.keys())
        
        symbols = [sb.split(',')[0] for sb in symbol_base]
        bases = [sb.split(',')[1] for sb in symbol_base]
        unique_bases = list(set(bases))
        
        
        targets = ','.join(symbols)
        
        url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/forex?symbols='
        
        
        for i,s in enumerate(symbols):
            url += s + '-' + bases[i] + ','
        url = url[:-1] #remove last comma
        url += '&apiKey=' + api_key
            
        response = requests.get(url)
        data = response.json()
        
      
        if len(data) > 0:
            print(data)
            c_dict = {}
            for sb in symbol_base:
                for d in data:
                    if d['uid'].replace('/',',') == sb:
                        c_dict[d['uid'].replace('/',',')] = {'current': d['rate'], '24hr_change': d['rate_over_24hr'], 'percent_change':d['percent_over_24hr']}
            
            all_forex_settings['symbols'] = c_dict
            with open('csv/forex_settings.json', 'w+') as f:
                json.dump(all_forex_settings, f)
    except Exception as e:
  
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))


def updateNews(api_key, logf):
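    """Fetch headlines and rewrite csv/news_settings.json.

    The endpoint is chosen from the saved settings: by country (or worldwide) when
    'use_country' is set, otherwise by category when 'use_category' is set (one of the
    two is assumed to be enabled). Headlines are stored as (title, source, published time)
    entries, capped at 'num_headlines'. Failures are logged to logf.
    """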
    
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category=technology'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country=GB'
    #'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?lang=en'
    
   
    try:
        
        all_settings = json.load(open('csv/news_settings.json', 'r'))
        
        
        
        if all_settings['use_country']:
            if all_settings['country'] == 'Worldwide':
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news_worldwide'
            else:
                c_dict = {'United States':'US', 'Australia':'AU', 'Canada': 'CA', 'Great Britain':'GB', 'New Zealand':'NZ', 'Ireland':'IE', 'Singapore':'SG', 'South Africa': 'ZA'}
                cc = c_dict[all_settings['country']]
                url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?country={}'.format(cc)
        elif all_settings['use_category']:
            url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/news?category={}'.format(all_settings['category'])
        
        url += '&apiKey=' + api_key
        response = requests.get(url)
        data = response.json()
        print(data)
        if len(data) > 0:
            max_headlines = int(all_settings['num_headlines'])
            #load user settings
            headlines = data[:max_headlines]
            headline_sources = [headline['source'] for headline in headlines]
            
                
            headline_titles = [headline['title'] for headline in headlines]
            
            headline_times = [headline['publishedAt'] for headline in headlines]
            
            headlines = list(zip(headline_titles, headline_sources, headline_times))
            print(headlines)
            all_settings['headlines'] = headlines

            with open('csv/news_settings.json', 'w+') as f:
                json.dump(all_settings, f)
        
            
        
    except Exception as e:
        
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))

    
def updateWeather(api_key, logf):
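    """Fetch current conditions and daily forecasts for every saved location and rewrite
    csv/current_weather.json and csv/daily_weather.json.

    Locations are geocoded with GeoNames and queried against the OpenWeatherMap
    One Call API (metric units). Failures are logged to logf.
    """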
    max_cities = 30
    
    try:
        gn = geocoders.GeoNames(username='fintic')
        
        f = open('csv/daily_weather.json', 'r')
        all_daily_settings = json.load(f)
        f.close()
        
        f = open('csv/current_weather.json', 'r')
        all_current_settings = json.load(f)
        f.close()
        
        current_locations = list(all_current_settings['locations'].keys())
        daily_locations = list(all_daily_settings['locations'].keys())
        
        all_locations = list(set(current_locations + daily_locations))
        
       
        
        
        current_weathers = {}
        daily_weathers = {}
        
        
        
        for location in all_locations:
            loc = gn.geocode(location)
            current_weather = {}
            
            lat = loc.latitude
            lon = loc.longitude
            url = 'https://api.openweathermap.org/data/2.5/onecall?lat={}&units=metric&lon={}&appid={}'.format(lat, lon, api_key)
            r = requests.get(url)
            payload = r.json()  # parse the response once and reuse it below

            weather = payload['current']

            current_weather['main_weather'] = weather['weather'][0]['main']
            current_weather['description'] = weather['weather'][0]['description']
            current_weather['temp'] = weather['temp']
            current_weather['min_temp'] = payload['daily'][0]['temp']['min']
            current_weather['max_temp'] = payload['daily'][0]['temp']['max']
            current_weather['feels_like'] = weather['feels_like']
            current_weather['humidity'] = weather['humidity']
            current_weather['clouds'] = weather['clouds']
            current_weather['wind_speed'] = weather['wind_speed']
            current_weather['wind_direction'] = weather['wind_deg']
            current_weather['visibility'] = weather['visibility']
            current_weather['uv'] = weather['uvi']
            current_weather['rain_chance'] = payload['hourly'][0]['pop']
            
            
            if location in current_locations:
                current_weathers[location] = current_weather
            
            daily_weather = []
            daily = payload['daily']
            
            for day in daily:
                dct = {}
                dct['main_weather'] = day['weather'][0]['main']
                dct['description'] = day['weather'][0]['description']
                dct['min_temp'] = day['temp']['min']
                dct['max_temp'] = day['temp']['max']
                daily_weather.append(dct)
                
            # add the relevant current information to the first day in daily
            daily_weather[0]['temp'] = weather['temp']
            daily_weather[0]['rain_chance'] = current_weather['rain_chance']
            daily_weather[0]['humidity'] = current_weather['humidity']
            daily_weather[0]['wind_speed'] = current_weather['wind_speed']
            daily_weather[0]['uv'] = current_weather['uv']
            daily_weather[0]['clouds'] = current_weather['clouds']
            daily_weather[0]['wind_direction'] = current_weather['wind_direction']
            daily_weather[0]['visibility'] = current_weather['visibility']
            
            if location in daily_locations:
                daily_weathers[location] = daily_weather
        
        all_current_settings['locations'] = current_weathers
        all_daily_settings['locations'] = daily_weathers
        print(all_current_settings, all_daily_settings)
        with open('csv/current_weather.json', 'w+') as f:
            json.dump(all_current_settings, f)
        with open('csv/daily_weather.json', 'w+') as f:
            json.dump(all_daily_settings, f)
    
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
            
        
def updateLeagueTables(api_key, logf):
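    """Fetch standings for the configured leagues and rewrite csv/league_tables.json.

    Each league maps to a list of teams with name, wins, losses, draws and standing,
    taken from the ScriptsAPI sports endpoint. Failures are logged to logf.
    """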
    
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?stats='
    try:
        f = open('csv/league_tables.json', 'r')
        all_settings = json.load(f)
        print(all_settings['leagues'].keys())
        f.close()
        
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
        
        for league in leagues:
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
            
        url = url[:-1] # remove last comma 
        url += '&apiKey=' + api_key
        r = requests.get(url)
          
        all_data = r.json()
        
        
        print('key', [all_data[i].keys() for i in range(len(all_data))])
        #print('key', all_data.keys())
        for i,l in enumerate(all_data):
            print(l)
            league = list(l.keys())[0]
            print('league', league, list(l.keys()))

            teams = []

            for d in all_data[i][league]:
                team = {}
                team['name'] = d['strTeam']
                team['wins'] = d['intWin']
                team['loss'] = d['intLoss']
                team['draw'] = d['intDraw']
                #team['played'] = d['intPlayed']
                team['standing'] = d['intRank']
                #team['points'] = d['intPoints']
                
                teams.append(team)
            leagues_info[league.upper()] = teams
        
        all_settings['leagues'] = leagues_info
        print(all_settings['leagues'].keys())
        with open('csv/league_tables.json', 'w+') as f:
            json.dump(all_settings, f)
    except Exception as e:
        
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))

    
def updateLeagueEvents(api_key, time, logf):
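    """Fetch fixtures/results for the given time window and rewrite the matching games file.

    time is one of 'past', 'upcoming' or 'livescore'; it selects both the settings file
    that is read and the ScriptsAPI sports query. Failures are logged to logf.
    """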
    
    url = 'https://bm7p954xoh.execute-api.us-east-2.amazonaws.com/default/ScriptsAPI/sports?{}='.format(time)
    
    if time == 'past':
        settings_file = 'csv/past_games.json'
    elif time == 'upcoming':
        settings_file = 'csv/upcoming_games.json'
    elif time == 'livescore':
        settings_file = 'csv/live_games.json'

    try:
        f = open(settings_file)
        all_settings = json.load(f)
        print(all_settings['leagues'].keys())
        f.close()
        leagues = all_settings['leagues'].keys()
        leagues_info = {}
       
        for league in leagues:
            
            if league == 'PREMIERLEAGUE':
                url += 'PREMIERLEAGUE,'
            else:
                url += league + ','
        url = url[:-1] # remove last comma 
        url += '&apiKey=' + api_key
        
        r = requests.get(url)
        
        all_data = r.json()
        
        #print(all_data)
    
        
        
        for league in all_data.keys():
            events = []
            for d in all_data[league]:
                event = {}
                event['date'] = d['dateEvent']
                
                if time == 'livescore':
                   
                    event['progess'] = d['strProgress']
                    event['status'] = d['strStatus']
                else:
                    
                    event['round'] = d['intRound']
                event['time'] = d['strTime']
                event['home_team'] = d['strHomeTeam']
                
                event['away_team'] = d['strAwayTeam']
                
                if time != 'upcoming':
                    event['away_score'] = d['intAwayScore']
                    event['home_score'] = d['intHomeScore']
                
                events.append(event)
            leagues_info[league.upper()] = events
        all_settings['leagues'] = leagues_info
        print(all_settings['leagues'].keys())
        
        with open(settings_file, 'w+') as f:
            json.dump(all_settings, f)
    except Exception as e:
        
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
      

def updateSports(api_key, logf):
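    """Update league tables plus live, past and upcoming games."""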
    # read user settings to decide which sports to update
   
    updateLeagueTables(api_key, logf)
    
    updateLeagueEvents(api_key,'livescore', logf) 
    updateLeagueEvents(api_key,'past', logf) 
    updateLeagueEvents(api_key,'upcoming', logf) 




def checkStocks(last_update, update_frequency):
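    """Decide whether stock data needs refreshing.

    Returns True during market hours (Mon-Fri, 09:30-16:00 New York time) once
    update_frequency minutes have passed since last_update, when any cached symbol
    is empty, or when the last update predates the previous day's close. Relies on
    the module-level NY_zone defined in the __main__ block.
    """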
    NY_time = datetime.now(NY_zone).replace(tzinfo=None)
    opening = NY_time.replace(hour=9, minute=30, second=0, microsecond=0).replace(tzinfo=None)
    closing = NY_time.replace(hour=16, minute=0, second=0, microsecond=0).replace(tzinfo=None)
    
    
    f = open('csv/stocks_settings.json', 'r')
    all_stocks_settings = json.load(f)
    f.close()
    stock_info = all_stocks_settings['symbols']
    symbols = list(stock_info.keys())
    
    updated = False

    diff = (NY_time - last_update).total_seconds()/60 #minutes
    if opening < NY_time < closing and datetime.today().weekday() < 5: # we need to do real time updating
        
        
        if diff >= update_frequency:
            updated = True
            
        
        
    elif emptyInfo(symbols, stock_info): # if theres any empty stocks
        updated = True
        
        
    
    else:
        # update if last update was before the previous days closing
        yday_closing = closing - dt.timedelta(days=1)
        yday_str = yday_closing.strftime("%d/%m/%Y %H:%M:%S")
        yday_closing = datetime.strptime(yday_str, "%d/%m/%Y %H:%M:%S")
        
        if last_update < yday_closing:
            updated = True
            
            
    return updated


def updateAll(api_key, weather_key, logf):
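    """Run every updater in turn; weather is only refreshed when a weather key is available."""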
    updateStocks(api_key, logf)

    updateCrypto(api_key, logf)

    updateForex(api_key, logf)

    updateNews(api_key, logf)
 
    updateSports(api_key, logf)
    
    if weather_key:
        updateWeather(weather_key, logf)
    
            
if __name__ == '__main__':
    logf = open("log.txt", "a")
    
    t = time.time()
    
    max_stocks = 200
    max_crypto = 100
    
    update_frequencies = {'stocks':2, 'crypto':1, 'news':120, 'weather': 120, 'sports': 120} #minutes

    NY_zone = pytz.timezone('America/New_York')
    CET_zone = pytz.timezone('Europe/Berlin')

    NY_time = datetime.now(NY_zone)

    CET_time = datetime.now(CET_zone)
    
    NY_str = NY_time.strftime("%d/%m/%Y %H:%M:%S")
    CET_str = CET_time.strftime("%d/%m/%Y %H:%M:%S")
    
    #f = open('csv/last_updates.json', 'w+')
    #update_times = {'stocks':NY_str, 'crypto':NY_str, 'news':NY_str, 'weather': NY_str, 'forex': CET_str} # all in NY time apart from forex in CET
    #json.dump(update_times, f)
    #f.close()
  
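    # api_keys.txt: the first line is the key passed to the ScriptsAPI endpoints;
    # the optional second line is the OpenWeatherMap key used for weather updates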
    f = open('api_keys.txt')
    
    api_keys = f.readlines()
    api_key = api_keys[0].strip()
    

    try:
        weather_key = api_keys[1].strip()
    except Exception as e:
        weather_key = False
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))
    try:
        f = open('csv/last_updates.json', 'r')
        last_updates = json.load(f)
        f.close()
        
    except Exception:
        last_updates = {"stocks": "27/06/2021 07:05:39", "crypto": "27/06/2021 07:05:39", "news": "27/06/2021 07:05:39", "weather": "27/06/2021 07:05:39", "forex": "27/06/2021 07:05:39", "sports": "27/06/2021 07:05:39"} 
    
    t = time.time()

    try:
        while True:
            
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
         
            msg = getInput()
            if msg == 'A':
                updateAll(api_key, weather_key, logf)
            
            #stocks
            stock_time = datetime.strptime(last_updates['stocks'], "%d/%m/%Y %H:%M:%S")
            stock_frequency = update_frequencies['stocks']
            diff = (NY_time - stock_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['stocks'] or msg == 's':
                stock_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['stocks'] = stock_time
                updateStocks(api_key, logf)
                
            # crypto
            crypto_time = datetime.strptime(last_updates['crypto'], "%d/%m/%Y %H:%M:%S")
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - crypto_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['crypto'] or msg == 'c':
                crypto_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateCrypto(api_key, logf)
                last_updates['crypto'] = crypto_time
                    
                    
            # weather
            weather_time = datetime.strptime(last_updates['weather'], "%d/%m/%Y %H:%M:%S")
        
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - weather_time).total_seconds()/60 #minutes
            if (diff >= update_frequencies['weather'] or msg == 'w') and weather_key:
                weather_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateWeather(weather_key, logf)
                last_updates['weather'] = weather_time
                
                
            # news
            news_time = datetime.strptime(last_updates['news'], "%d/%m/%Y %H:%M:%S")
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - news_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['news'] or msg == 'n':
                news_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateNews(api_key, logf)
                last_updates['news'] = news_time
                
            # sports
            sports_time = datetime.strptime(last_updates['sports'], "%d/%m/%Y %H:%M:%S")
           
            
            NY_time = datetime.now(NY_zone).replace(tzinfo=None)
            diff = (NY_time - sports_time).total_seconds()/60 #minutes
            if diff >= update_frequencies['sports'] or msg == 'S':
                sports_time = NY_time.strftime("%d/%m/%Y %H:%M:%S")
                updateSports(api_key, logf)
                last_updates['sports'] = sports_time
                
            #forex updates once every 24hours at 1700 CET
            
            # update if last update was before the previous days closing
            forex_time = datetime.strptime(last_updates['forex'], "%d/%m/%Y %H:%M:%S")
            CET_time = datetime.now(CET_zone).replace(tzinfo=None)
            yday_update = (CET_time.replace(hour=17, minute=00, second=0, microsecond=0) - dt.timedelta(days=1)).replace(tzinfo=None)
        
            if forex_time < yday_update or msg == 'f':
                forex_time = CET_time.strftime("%d/%m/%Y %H:%M:%S")
                last_updates['forex'] = forex_time  
                updateForex(api_key, logf)
                
         
            with open('csv/last_updates.json', 'w+') as f:
                json.dump(last_updates, f)
       
                
    except Exception as e: 
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logf.write(str(e))
        logf.write('. file: ' + fname)
        logf.write('. line: ' +  str(exc_tb.tb_lineno))
        logf.write('. type: ' +  str(exc_type))
        logf.write('\n ' + "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])))