Initial commit; a weekly analyzer function still needs to be added.
This commit is contained in:
parent
326eb8476f
commit
03d6a784fd
9
history.json
Normal file
9
history.json
Normal file
@ -0,0 +1,9 @@
|
||||
{
|
||||
"15/12/2021": {
|
||||
"Confirmed": 718651,
|
||||
"New Cases": 851,
|
||||
"Tests": 28810,
|
||||
"Positive Percentage": 3,
|
||||
"Vaccine Percentage": 27
|
||||
}
|
||||
}
|
100
main.py
Normal file
100
main.py
Normal file
@ -0,0 +1,100 @@
|
||||
from bs4 import BeautifulSoup
|
||||
import requests
|
||||
from time import sleep
|
||||
from datetime import date, timedelta
|
||||
from discord_webhook import DiscordWebhook, DiscordEmbed
|
||||
import json
|
||||
from math import ceil
|
||||
|
||||
# while True:
|
||||
TODAY = str(date.today().strftime("%d/%m/%Y"))
|
||||
YESTERDAY = date.today() - timedelta(days = 1)
|
||||
YESTERDAY = str(YESTERDAY.strftime("%d/%m/%Y"))
|
||||
|
||||
def write_json(new_data, filename='history.json'):
|
||||
with open(filename,'r+') as file:
|
||||
|
||||
file_data = json.load(file)
|
||||
file_data[TODAY] = new_data
|
||||
file.seek(0)
|
||||
json.dump(file_data, file, indent = 4)
|
||||
|
||||
def get_data():
|
||||
|
||||
r = requests.get("https://coronavirus.bg/bg")
|
||||
r = r.text
|
||||
|
||||
soup = BeautifulSoup(r, 'html.parser')
|
||||
new_confirmed = int(soup.find("p", {"class": "statistics-value confirmed"}).text.replace(" ", ""))
|
||||
tests = soup.find("div",{"class":"col-lg-3 col-md-6"})
|
||||
|
||||
# I JUST FOUND OUT THERE ARE HIDDEN JSONS IN THE PAGE
|
||||
# Remember kids, measure twice, cut once
|
||||
|
||||
r = requests.get("https://coronavirus.bg/statistics/vaccinated-agegroups.json").json()
|
||||
vaxxed = 0
|
||||
for i in r["data"].keys():
|
||||
vaxxed += r["data"][i]
|
||||
|
||||
|
||||
r = requests.get("https://www.worldometers.info/world-population/bulgaria-population/")
|
||||
soup = BeautifulSoup(r.text, "html.parser")
|
||||
current_population = int(soup.find("div", class_="col-md-8 country-pop-description").find_all_next("strong")[1].text.replace(",",""))
|
||||
|
||||
|
||||
for i in tests.children:
|
||||
if "statistics-subvalue" in str(i):
|
||||
tests = int(str(i).split(">")[1].split("<")[0].replace(" ",""))
|
||||
|
||||
with open("history.json", "r") as f:
|
||||
data = json.load(f)
|
||||
try:
|
||||
yesterday = data[YESTERDAY]
|
||||
new_cases = new_confirmed - data[YESTERDAY]["Confirmed"]
|
||||
except Exception as e:
|
||||
yesterday = 0
|
||||
new_cases = 1
|
||||
print(e, "Passing...")
|
||||
|
||||
new_data = {
|
||||
"Confirmed":new_confirmed,
|
||||
"New Cases": new_cases,
|
||||
"Tests":tests,
|
||||
"Positive Percentage": ceil(new_cases/tests * 100),
|
||||
"Vaccine Percentage":ceil(vaxxed / current_population * 100)
|
||||
}
|
||||
print(new_data)
|
||||
write_json(new_data)
|
||||
return new_data
|
||||
|
||||
def discord_daily():
|
||||
webhook = DiscordWebhook(url="https://discord.com/api/webhooks/920787339146575943/ojsBB9nlWPnLp3ncW726_4UrckXOHBEHB5YJ7KDrwAnn_6tQTmDWitkY6yaxsxkeLnKC")
|
||||
data = get_data()
|
||||
|
||||
# Color coding levels of danger
|
||||
if data["Positive Percentage"] >= 10:
|
||||
color = "fff00"
|
||||
elif data["Positive Percentage"] >= 30:
|
||||
color = "ff000"
|
||||
elif data["Positive Percentage"] >= 50:
|
||||
color = "ffa500"
|
||||
elif data["Positive Percentage"] >= 80:
|
||||
color = "000000"
|
||||
else:
|
||||
color = "00ff00"
|
||||
|
||||
embed = DiscordEmbed(title=f'Covid Daily {TODAY}', description='Daily briefing!', color=color)
|
||||
|
||||
for i in data.keys():
|
||||
if "Percentage" in i:
|
||||
embed.add_embed_field(name=str(i), value=str(data[i])+"%")
|
||||
else:
|
||||
embed.add_embed_field(name=str(i), value=str(data[i]))
|
||||
|
||||
embed.set_footer(text='Data taken from https://worldometer.com and https://coronavirus.bg')
|
||||
|
||||
webhook.add_embed(embed)
|
||||
webhook.execute()
|
||||
|
||||
discord_daily()
|
||||
|
Reference in New Issue
Block a user