Merge pull request 'Created self updater, fixed joining, long timeout' (#12) from windows into main

Reviewed-on: boyan_k/custoMM#12
boyan_k 2023-05-17 02:53:54 +02:00
commit 25073a4278
4 changed files with 27 additions and 7 deletions

View File

@@ -1,13 +1,10 @@
 import random
 import time
-import configparser
 from .Util import WhatTheFuckDidYouDo


 class Game:
     def __init__(self, *, loop=None, connection,config):
-        self.config = config
-        self.URL = self.config["DEFAULT"]["URL"]
-        self.password = self.config["LEAGUE"]["LOBBY_PASS"]
+        self.password = str(config["LEAGUE"]["LOBBY_PASS"])
         # Loop until we get connection
         self.connection = connection
@@ -27,7 +24,7 @@ class Game:

     def join_by_name(self,name):
         # Joins a game given its name
-        return self.join_by_id(self.search(str(name)))
+        return self.join_by_id(self.search(str(name))[0])

     def join_random(self):
         # Joins a random public game

View File

@@ -164,7 +164,7 @@ class Scraper:
         while response := requests.get(f"{self.URL}/current/{self.name}").json()["players"] != 10:
             logging.info("Waiting for players...")
             timeout_counter += 5
-            if timeout_counter == 60:
+            if timeout_counter == 120:
                 logging.info("Timeout, aborting...")
                 break
             time.sleep(5)
@@ -218,7 +218,7 @@ class Scraper:

         game.join_by_name(name)
         # Update count of players
-        requests.put(f"{self.URL}/current/{checker['creator']}", data={
+        requests.put(f"{self.URL}/current/{checker['creator']}/", data={
             "lobby_name": checker["lobby_name"],
             "creator": name,
             "players": int(checker["players"])+1,

View File

@@ -0,0 +1,19 @@
+from requests import get, put, post, delete
+from wget import download
+from os import system
+from time import sleep
+
+class SelfUpdate():
+    # Auto update when new release is found on github
+    def __init__(self, version, ui):
+        self.version = version
+        self.newest = get("https://api.github.com/repos/confestim/custoMM/releases/latest").json()["tag_name"]
+        if self.version != self.newest:
+            ui.icon.notify("New version found", f"New version {self.newest} found, updating...", title="CustoMM")
+            sleep(5)
+            self.update()
+        return
+
+    def update(self):
+        download(f"https://github.com/confestim/custoMM/releases/download/{self.newest}/custoMM_installer.exe")
+        system("custoMM_installer.exe")

View File

@@ -11,6 +11,9 @@ from classes.Util import WhatTheFuckDidYouDo
 from classes.UI import UI
 from classes.PeriodicScraper import PeriodicScraper
 from classes.Scraper import Scraper
+from classes.SelfUpdate import SelfUpdate
+
+VERSION = "1.1.1"

 # Config section
 logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO)
@@ -43,6 +46,7 @@ def main():

     # Running the UI
     periodic = PeriodicScraper(config=config)
     ui = UI(scraper=periodic.connector, periodic=periodic, base_dir=base_dir)
+    update = SelfUpdate(ui=ui, version=VERSION)
     periodic.start()
     periodic.join()
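
In main(), the update check is constructed after the UI but before the scraper threads start, and because SelfUpdate does all its work in __init__, the notification, sleep(5), and installer launch happen synchronously at this point. A condensed sketch of that ordering (names as in the diff, the rest of the function body elided):

    periodic = PeriodicScraper(config=config)
    ui = UI(scraper=periodic.connector, periodic=periodic, base_dir=base_dir)
    SelfUpdate(ui=ui, version=VERSION)   # may notify, wait 5 s, and launch the installer
    periodic.start()                     # scraping only begins once the check has returned
    periodic.join()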