Postgres database fully integrated. Should be merged with master.

commit 03cee23ccb (parent 9e67b8c1b7)
6 changed files with 69 additions and 76 deletions

.gitignore (vendored)

@@ -4,4 +4,5 @@
 **.pyc
 Pipfile.lock
 config/config.json
 config/pgdb.key
+pg_data/

@@ -11,15 +11,11 @@ async def ping(ctx):
 @commands.command()
 async def report(ctx, arg):
     if(arg):
-        # update_data()
-        if(arg.upper() == 'KEYS'):
-            await ctx.send(covid_db.keys())
-        else:
-            print('Got command.')
-            try:
-                await ctx.send(get_covid_data(arg.upper()))
-            except Exception as e:
-                await ctx.send(f'{e}')
+        print('Got command.')
+        try:
+            await ctx.send(get_covid_data(arg.upper()))
+        except Exception as e:
+            await ctx.send(f'{e}')
 
 
 @commands.command()
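
Note: the special-cased 'KEYS' listing depended on the in-memory covid_db dict, which this commit deletes; the command now upper-cases whatever the user passed, fetches a formatted report from the database, and echoes any exception back into the channel. A minimal wiring sketch for context — the prefix, the module name covid_data, and the bot setup are assumptions; only the command body mirrors the diff:

    # Hypothetical wiring sketch (assumed bot setup; only the command body mirrors the diff).
    from discord.ext import commands

    from covid_data import get_covid_data  # assumed module name for the data helpers

    bot = commands.Bot(command_prefix='!')  # prefix is an assumption; discord.py 2.x also needs intents


    @bot.command()
    async def report(ctx, arg=None):
        if arg:
            print('Got command.')
            try:
                # e.g. '!report USA' -> formatted totals for the USA row
                await ctx.send(get_covid_data(arg.upper()))
            except Exception as e:
                await ctx.send(f'{e}')

    # bot.run(token)  # token loading omitted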

lib/covidData.py

@@ -9,6 +9,7 @@ Base = declarative_base()
 class covidData(Base):
     __tablename__ = 'data'
     selection = Column(String(32), primary_key=True)
+    selection_original = Column(String(32))
     total_cases = Column(Integer)
     new_cases = Column(Integer)
     total_deaths = Column(Integer)
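
Note: the new selection_original column keeps the scraped name in its original casing, while selection stays the upper-cased primary key used for lookups; set_data() later in the diff fills both, and get_top_data() prints selection_original. Illustrative values:

    # Sketch of how the two columns relate (values are illustrative, not from the diff).
    row = covidData(selection='SOUTH KOREA',           # upper-cased lookup key (primary key)
                    selection_original='South Korea')  # original casing, used for display output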

@@ -26,10 +27,10 @@ if(exists(path)):
             config_data = json.load(config_file)
     except Exception as e:
         print(f'There was some issue opening and loading the config.\n{e}')
-        # exit(1)
+        exit(1)
 else:
     print('Didn\'t find the config file.')
-    # exit(1)
+    exit(1)
 try:
     with open(config_data['postgres_pass']) as pgdb_pass:
         engine = create_engine(
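
Note: config/pgdb.key (ignored above in .gitignore) holds the Postgres password, and its path sits in the config under postgres_pass. The diff cuts off mid-call, so the following is only a hypothetical completion of create_engine(); the URL layout, user, host, port and database name are assumptions, not taken from the repository:

    # Hypothetical completion of the truncated call -- user/host/port/database are assumed.
    try:
        with open(config_data['postgres_pass']) as pgdb_pass:
            engine = create_engine(
                f'postgresql://covid_bot:{pgdb_pass.read().strip()}@localhost:5432/covid_bot')
    except Exception as e:
        print(f'There was an issue connecting to the database.\n{e}')
        exit(1)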

lib/covid_data_lib.py

@@ -1,6 +1,5 @@
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
-from inspect import getmembers, isroutine
 from lib.covidData import covidData, Base
 
 
@@ -19,7 +18,8 @@ def init_database(config_data):
 
 
 def set_data(session, selection, import_data):
-    new_data = covidData(selection=selection.upper())
+    new_data = covidData(selection=selection.upper(),
+                         selection_original=selection)
     for n in range(1, 8):
         data = import_data[n].strip().replace(
             ',', '').replace('+', '').replace('+', '')
@@ -54,23 +54,8 @@ def set_data(session, selection, import_data):
         if(n == 7):
             new_data.serious_critical = 0
     new_data.total_cases_per_one_mil = import_data[8].strip()
-    try:
-        session.query(covidData).filter(covidData.selection == selection).one()
-        try:
-            session.merge(new_data)
-            session.commit()
-        except Exception as e:
-            session.rollback()
-            print(f'There was an issue trying to add new data:\n{e}')
-            # exit(1)
-    except:
-        try:
-            session.add(new_data)
-            session.commit()
-        except Exception as e:
-            session.rollback()
-            print(f'There was an issue trying to add new data:\n{e}')
-            # exit(1)
+    session.merge(new_data)
+    session.commit()
 
 
 def get_formatted_data(session, selection):
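
Note: the old nested try blocks first queried for an existing row and then chose between merge() and add(); session.merge() alone already covers both cases, looking the object up by primary key (selection), updating the persistent row if it exists and inserting otherwise. A minimal sketch of that upsert behaviour, using an in-memory SQLite engine as a stand-in for the project's Postgres one:

    # Minimal upsert sketch; values are illustrative, SQLite stands in for Postgres.
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    from lib.covidData import covidData, Base

    engine = create_engine('sqlite://')          # stand-in for the real engine
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.merge(covidData(selection='USA', selection_original='USA', total_cases=1000))
    session.merge(covidData(selection='USA', selection_original='USA', total_cases=1200))  # updates, no duplicate
    session.commit()
    print(session.query(covidData).count())      # 1 -- inserted once, then updated in place

merge() is a read-then-write rather than an atomic upsert, which is fine for this single-writer scraper.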

@@ -80,17 +65,10 @@ def get_formatted_data(session, selection):
         'per', '/').split('_')]) for c in columns]
     all_data_query = session.query(covidData).filter(
         covidData.selection == selection).all()
-    output = ''
-    for data in all_data_query:
-        get_values = [attr for attr in getmembers(data, lambda a:not(
-            isroutine(a))) if not(attr[0].startswith('__') and attr[0].endswith('__')) and not attr[0].startswith('_') and not attr[0] == 'metadata']
-        output += f'{columns[3]}: {get_values[3][1]}\n'
-        output += f'{columns[5]}: {get_values[5][1]}\n'
-        output += f'{columns[1]}: {get_values[1][1]}\n'
-        output += f'{columns[7]}: {get_values[7][1]}\n'
-        output += f'{columns[2]}: {get_values[2][1]}\n'
-        output += f'{columns[8]}: {get_values[8][1]}\n'
-        output += f'{columns[0]}: {get_values[0][1]}\n'
-        output += f'{columns[4]}: {get_values[4][1]}\n'
-        output += f'{columns[6]}: {get_values[6][1]}\n'
-    return output
+    return (columns, all_data_query)
+
+
+def get_top_n_rows(session, num):
+    print(f'Getting top {num} rows.')
+    top_n_rows = session.query(covidData).order_by(covidData.total_cases.desc()).limit(num).all()
+    return top_n_rows
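
Note: get_formatted_data() now stops at the query and hands back the (columns, rows) pair, leaving string building to format_covid_data() in the caller's module, and get_top_n_rows() pushes the top-N selection into the database instead of sorting a dict in Python. Rough usage sketch, assuming a session obtained from init_database(import_config()) as elsewhere in the diff:

    # Usage sketch; 'session' comes from init_database(import_config()) as in the diff.
    rows = get_top_n_rows(session, 6)       # roughly: SELECT * FROM data
    for r in rows:                          #          ORDER BY total_cases DESC LIMIT 6
        print(r.selection_original, r.total_cases)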

@@ -8,6 +8,7 @@ from math import floor
 
 async def background_task(client, config_dict):
     await client.wait_until_ready()
+    update_data()
     channel = client.get_channel(config_dict['report_channel_id'])
     while not client.is_closed():
         current_hour_and_minute = [int(t) for t in str(

@@ -4,15 +4,26 @@ import requests
 from bs4 import BeautifulSoup
 import json
 from os.path import exists
-from lib.covid_data_lib import init_database, set_data, get_formatted_data
-# temporary database
-covid_db = {}
-
+from inspect import getmembers, isroutine
+from lib.covid_data_lib import init_database, set_data, get_formatted_data, get_top_n_rows
 
 # San Antonio url
 sa_data_url = 'https://www.sanantonio.gov/health/news/alerts/coronavirus'
 
 
+def format_parse_int(num):
+    output = ''
+    count = 0
+    while not num == 0:
+        if(count == 3):
+            output += ','
+            count = 0
+        output += f'{num%10}'
+        num = int(num/10)
+        count += 1
+    return output[::-1]
+
+
 def import_config(path='config/config.json'):
     if(exists(path)):
         try:
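
Note: format_parse_int() builds the thousands-separated string by peeling the last digit off with num % 10, inserting a comma after every third digit, and reversing the result at the end; num = int(num/10) truncates toward zero so the loop terminates. A quick check of its behaviour (the second line only points out the built-in format spec that yields the same result):

    # Worked example of the digit-peeling loop, assuming format_parse_int from this module.
    print(format_parse_int(1234567))   # '1,234,567'
    print(f'{1234567:,}')              # '1,234,567' -- the format mini-language equivalent
    print(format_parse_int(0))         # ''  (the while loop never runs for 0)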

@@ -34,7 +45,6 @@ def update_data():
     except Exception as e:
         session.rollback()
         print(f'There was an error trying to create a database session:\n{e}')
-        # exit(1)
     data_html = requests.get('https://www.worldometers.info/coronavirus/')
     if(data_html.status_code == '200' or data_html.status_code == 200):
         parsed_html = BeautifulSoup(data_html.text, features='html.parser')
|
||||||
for row in table.findAll('tr'):
|
for row in table.findAll('tr'):
|
||||||
if(row and row.findAll('td')):
|
if(row and row.findAll('td')):
|
||||||
if(row.find('a')):
|
if(row.find('a')):
|
||||||
covid_db[row.find('a').text.upper()] = set_data(
|
set_data(
|
||||||
session, row.find('a').text, [r.text for r in row.findAll('td')])
|
session, row.find('a').text, [r.text for r in row.findAll('td')])
|
||||||
elif(row.findAll('td')[0] and row.findAll('td')[0].text):
|
elif(row.findAll('td')[0] and row.findAll('td')[0].text):
|
||||||
covid_db[row.findAll('td')[0].text.replace(':', '').upper()] = set_data(
|
set_data(
|
||||||
session, row.findAll('td')[0].text.replace(':', ''), [r.text for r in row.findAll('td')])
|
session, row.findAll('td')[0].text.replace(':', ''), [r.text for r in row.findAll('td')])
|
||||||
|
|
||||||
|
|
||||||
|
def format_covid_data(columns, data):
|
||||||
|
output = ''
|
||||||
|
get_values = [attr for attr in getmembers(data, lambda a:not(
|
||||||
|
isroutine(a))) if not(attr[0].startswith('__') and attr[0].endswith('__')) and not attr[0].startswith('_') and not attr[0] == 'metadata']
|
||||||
|
output += f'{columns[4]}: {get_values[4][1]}\n'
|
||||||
|
output += f'{columns[6]}: {format_parse_int(int(get_values[6][1]))}\n'
|
||||||
|
output += f'{columns[1]}: {format_parse_int(int(get_values[1][1]))}\n'
|
||||||
|
output += f'{columns[8]}: {format_parse_int(int(get_values[8][1]))}\n'
|
||||||
|
output += f'{columns[2]}: {format_parse_int(int(get_values[2][1]))}\n'
|
||||||
|
output += f'{columns[9]}: {format_parse_int(int(get_values[9][1]))}\n'
|
||||||
|
output += f'{columns[0]}: {format_parse_int(int(get_values[0][1]))}\n'
|
||||||
|
output += f'{columns[5]}: {format_parse_int(int(get_values[5][1]))}\n'
|
||||||
|
output += f'{columns[7]}: {get_values[7][1]}\n'
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
def get_covid_data(selection):
|
def get_covid_data(selection):
|
||||||
print('Updating data.')
|
print('Updating data.')
|
||||||
update_data()
|
columns, all_data_query = get_formatted_data(
|
||||||
return get_formatted_data(init_database(import_config()), selection)
|
init_database(import_config()), selection)
|
||||||
|
output = ''
|
||||||
|
for data in all_data_query:
|
||||||
|
output += format_covid_data(columns, data)
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
def get_top_data(number):
|
def get_top_data(number):
|
||||||
if(not covid_db):
|
top_n_rows = get_top_n_rows(init_database(import_config()), number + 1)
|
||||||
update_data()
|
|
||||||
covid_db_dict = {}
|
|
||||||
for data in covid_db.keys():
|
|
||||||
try:
|
|
||||||
if(covid_db[data].data['Selection'] and covid_db[data].data['Total Cases'] and not covid_db[data].data['Selection'] == 'Total'):
|
|
||||||
covid_db_dict[covid_db[data].data['Selection'].upper()] = int(
|
|
||||||
covid_db[data].data['Total Cases'].replace(',', ''))
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
return f'{e} tests'
|
|
||||||
covid_db_dict_sorted = {key: value for key, value in sorted(
|
|
||||||
covid_db_dict.items(), key=lambda item: item[1], reverse=True)}
|
|
||||||
top_covid_data = [(covid_db[selection].data['Selection'], covid_db[selection].data['Total Cases'])
|
|
||||||
for selection in list(covid_db_dict_sorted.keys())[:number]]
|
|
||||||
output = ''
|
output = ''
|
||||||
counter = number + 1
|
count = 0
|
||||||
for data in top_covid_data:
|
for row in top_n_rows:
|
||||||
output += f'# {counter - number}\n'
|
if(not count == 0):
|
||||||
output += f'{data[0]}: {data[1]}'
|
output += f'# {count}\n{row.selection_original}: {format_parse_int(int(row.total_cases))}'
|
||||||
if(not counter == number):
|
if(not count == number):
|
||||||
output += '\n\n'
|
output += '\n'
|
||||||
counter += 1
|
count += 1
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
|
||||||
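
Note: get_top_data() now asks the database for number + 1 rows and skips the row at count == 0; ordered by total_cases descending, that first row is presumably the site-wide 'Total' aggregate (the old dict-based code filtered 'Total' out explicitly), so only ranks 1..number are printed. Output shape, with purely illustrative figures:

    # Usage sketch; the figures shown are illustrative, not real data.
    print(get_top_data(3))
    # # 1
    # USA: 1,234,567
    # # 2
    # Brazil: 987,654
    # # 3
    # India: 876,543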