"""Scrape CSV datalog exports from a device web portal and convert them to JSON.

Reconstructed from git patch 2644c48d ("Should probably work now"); that patch
also renamed src/test.json -> src/out.json.
"""
# NOTE(review): `get(url)` is used below but its import sat above the patch
# hunk (lines 1-2 of the original file); presumably requests — confirm.
from requests import get
from bs4 import BeautifulSoup
from chardet import detect
import csv
from json import dumps

# Base URL of the device serving the datalog portal.
ROOT = "http://192.168.2.20"


# Turns a csv reader into a list of dictionaries (json-ready)
def reader_to_json(reader: csv.DictReader) -> list[dict]:
    """Convert *reader*'s rows to a list of dicts with keys/values stripped of whitespace."""
    return [{k.strip(): v.strip() for k, v in row.items()} for row in reader]


# Gets a csv file from the server and turns it into a list of dictionaries (json-ready)
def get_json(url: str) -> list[dict]:
    """Download the CSV file at *url* and return its rows as a list of dicts.

    Fixes vs. the original:
    - the chardet-detected encoding was computed but ignored (the body always
      decoded UTF-8); it is now actually used, with UTF-8 as a fallback when
      detection fails;
    - csv.DictReader was given the raw string, which iterates it character by
      character; it needs an iterable of lines, hence splitlines();
    - removed a leftover debug print of the raw payload.
    """
    raw = get(url).content
    encoding = detect(raw)["encoding"] or "utf-8"
    text = raw.decode(encoding)
    reader = csv.DictReader(text.splitlines())
    return reader_to_json(reader)
# DEBUG helper: loads a local CSV file instead of fetching from the device.
def local_file(path: str) -> list[dict]:
    """Parse the CSV file at *path* and return its rows as a list of dicts.

    Debug side effect kept from the original: also dumps the parsed rows to
    ``out.json`` in the current working directory.
    """
    with open(path, "r") as src:
        rows = reader_to_json(csv.DictReader(src))

    with open("out.json", "w") as out:
        out.write(dumps(rows, indent=2))

    return rows


# Gets the datalog file from the server.
def get_file(entries: int) -> list[dict]:
    """Fetch the datalog page, follow its download link, and return the rows.

    The return annotation was ``dict`` in the original but the function returns
    the ``list[dict]`` produced by get_json. ``soup.find`` returns None when the
    element is absent, so the absence check is ``is None`` — a bare truthiness
    test is unreliable for bs4 Tags (an empty tag is falsy).

    Raises ValueError when the page has no element with id="link1".
    """
    url = f"{ROOT}/Portal/Portal.mwsl?PriNav=DataLog&RecentCount={entries}"
    soup = BeautifulSoup(get(url).content, "html.parser")
    link = soup.find(id="link1")
    if link is None:
        raise ValueError("No link found")
    return get_json(f"{ROOT}{link.get('href')}")


# Saves a list of dicts as a pretty-printed JSON file.
def save_file(json: list[dict], filename: str) -> None:
    """Serialize *json* to *filename* as indented JSON.

    Raises OSError on write failure. The original raised IndexError here —
    a misleading exception type for an I/O error — and swallowed every
    exception type into it; serialization errors (e.g. TypeError from dumps)
    now propagate as themselves.

    NOTE(review): the parameter name ``json`` shadows the stdlib module name;
    kept to preserve the keyword-call interface.
    """
    try:
        with open(filename, "w") as out:
            out.write(dumps(json, indent=2))
    except OSError as exc:
        raise OSError("Error saving file") from exc


def main() -> None:
    # DEBUG: parse a local test file; switch to get_file(10) to hit the device.
    rows = local_file("test.csv")
    save_file(rows, "out.json")


if __name__ == "__main__":
    main()