Mirror of https://github.com/Code-For-Groningen/temmies.git (synced 2025-07-04 12:34:57 +02:00)

Compare commits: e3d863d7b2 ... 1.2.111 (52 commits)

SHA1: 6a781ad238, a1104522f1, fb8b5cd454, 1367fd667f, 1516ef74be, c37edb59c6, 82a072ee14, 3d9ba063ce, 82bafa1925, 52d9d86260, da4705b56a, f6e6bc28d2, 2fa3bfbad8, 9f99df54d8, 514fcd2438, 812f384c3d, 34bf29d274, f3d9a987f6, cb6ec3cc1c, 0ff3f28978, de66e9762e, 1a950c0eb2, a3a9f5dfee, 461b506be5, a0aeca87b1, 17f0bd237a, 2378be4d42, 6a63abf7ae, 019e2c181e, 135420d488, aab574cdb0, 9d92db4644, 2c2d39c6dd, 54e086c93a, 667a388da4, 2daee84d4f, cd5e9b2c8d, de6262a4bb, 8325a30af8, 0f92d05bec, 2785ec86e9, 78aade7c8c, 11864cae6b, 27d21ac7c1, df8429d811, 3c63a64eac, b819305704, 1016e56e70, b1eface45d, b8a6e05ea0, c0056a27d1, cff77bcc95
.gitignore (vendored, 10 lines changed)

@ -1,6 +1,11 @@
-# Config
+# Config - Testing
 config.py
-baller.py
+tests/
+pathfinding/
+test.py
 
+#Doc env
+.docs_env
+
 # Byte-compiled / optimized / DLL files
 __pycache__/

@ -326,3 +331,4 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+project-hierarchy.txt
Read the Docs configuration (2 lines changed)

@ -3,12 +3,10 @@ version: 2
 
 build:
   os: ubuntu-22.04
 
   tools:
-    python: "3.12"
+    python: "3.8"
 
-sphinx:
-  configuration: docs/conf.py
-
-python:
-  install:
-    - requirements: docs/requirements.txt
+mkdocs:
+  configuration: mkdocs.yml
Folder.py (removed, 30 lines)

# Module to handle each assignment (most difficult part)

from Base import Base
from Exercise import Exercise
from requests import Session


class Assignment(Base):
    def __init__(self, url:str, name:str, session:Session, parent):
        super().__init__(url, name, session, parent)
        self.download = Downloadable(name, session, self)

    def __str__(self):
        return f"Assignment {self.name} in course {self.parent.name}"

    def getExercises(self) -> list[Exercise]:
        # Find li large
        ul = self.soup.find('ul', class_='round')

        # Turn each li to an exercise instance
        return self.liLargeToExercises(ul, self.session, self)

    def getExercise(self, name:str) -> Exercise:
        # Get the exercise
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        # Search by name
        exercise = soup.find('a', text=name)
        # Get the url and transform it into an exercise object
        return Exercise(url=exercise['href'], name=name, session=self.session, assignment=self)
Makefile (removed, 20 lines)

# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = source
BUILDDIR      = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
README.md (21 lines changed)

@ -1,22 +1,25 @@
 <p align="center">
-<img src="images/rugemmie.gif" />
+<img src="docs/img/temmie.png" width="200px"/>
+</p>
+<p align="center">
+<a href="https://temmies.confest.im"><img alt="Read the Docs" src="https://img.shields.io/readthedocs/temmies"></a>
+<img alt="GitHub" src="https://img.shields.io/github/license/Code-For-Groningen/temmies">
 </p>
 
-A python library which interacts with themis. Uses bs4. I'll try to end development on a somewhat working state.
+A Python library which interacts with [Themis](https://themis.housing.rug.nl/). Uses bs4. I'll try to end development in a somewhat working state.
 
 ## Intended Features
 * [x] Log in
-* [ ] Classes, methods and attributes described in the map below
+* [x] Submit
+* [x] Bulk download of test cases and files
+* [x] Submission status
 
 ## Docs
-[here](http://temmies.rtfd.io/). Heavily WIP.
+- [here](http://temmies.confest.im/).
 
-## Class map
-![](images/map.png)
-
 ## Possible continuations
-* [ ] Discord bot
-* [ ] CLI program
+* Discord bot
+* [CLI program](https://github.com/Code-For-Groningen/temmies-cli)
 
 ## Thanks to
 * [Glitchcat](https://glitchcat.github.io/themis-api/), cool docs bro.
docs/about.md (new file, 10 lines)

# This project was made with ❤️
By [Boyan](https://confest.im) from the student organization [Code for Groningen](https://github.com/Code-For-Groningen/).

It has **no** affiliation with the [University of Groningen](https://rug.nl).

## Contact
Shoot me an email: boyan(plus)cfg(at)bobokara.com.

## License
This project is licensed under the GPL 3.0 license.
docs/api.md (new file, 187 lines)

# Classes
---

## `Themis`
Creates the initial connection to Themis.

### Usage
```python
from temmies.themis import Themis

themis = Themis("s-number")
```

On the first run, you will be prompted for your password. On subsequent runs you are logged in automatically, as the password is stored in the system keyring. If you want to delete it, [click here](https://www.google.com/search?hl=en&q=delete%20a%20password%20from%20keyring).
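If you prefer to remove the stored credential from Python rather than from your desktop keyring tool, a minimal sketch using the `keyring` package is shown below. It assumes the `"<user>-temmies"` service name that `temmies/themis.py` uses when saving the password; the s-number is a hypothetical example.

```python
import keyring
from keyring.errors import PasswordDeleteError

# Assumption: temmies stores the password under the service name
# f"{user}-temmies" with the username as the account (see temmies/themis.py).
user = "s1234567"  # hypothetical s-number
try:
    keyring.delete_password(f"{user}-temmies", user)
    print("Stored Themis password removed from the keyring.")
except PasswordDeleteError:
    print("No stored password found for this user.")
```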
### Methods
#### `login()`
Logs in to Themis. Runs automatically when the class is initialized.

#### `get_year(year_path)`
Returns an instance of a [`Year`](#year) for the academic year specified by `year_path`.

```python
year = themis.get_year(2023, 2024)
```

#### `all_years()`
Returns a list of `Year` instances corresponding to all years visible to the user.

```python
years = themis.all_years()
```

----

## `Year`

### Usage
```python
year = themis.get_year(2023, 2024)
```

### Methods
#### `get_course(course_title)`
Returns an instance of a [`Course`](#course) with the title `course_title`.

```python
pf = year.get_course("Programming Fundamentals (for CS)")
```

#### `get_course_by_tag(course_tag)`
Returns an instance of a [`Course`](#course) using the course identifier `course_tag`.

```python
ai_course = year.get_course_by_tag("adinc-ai")
```

#### `all_courses()`
Returns a list of `Course` instances corresponding to all courses visible to the user in a given `Year`.

```python
courses = year.all_courses()
```

----

## `Course`
### Usage
```python
pf = year.get_course("Programming Fundamentals (for CS)")
assignments = pf.get_groups()
```

### Methods
#### `get_groups(full=False)`
Returns a list of `ExerciseGroup` or `Group` instances corresponding to all items visible to the user in a given `Course`. With the default `full=False`, only the top-level (name, link) of each item is returned. With `full=True`, the whole course is traversed.

```python
ai_groups = ai_course.get_groups(full=True)
exercise = ai_groups[7].exercises[1]
exercise.submit(["solution.py"], silent=False)
```

#### `get_group(name, full=False)`
Returns an instance of an `ExerciseGroup` or `Group` with the name `name`. With the default `full=False`, only the (name, link) of the group is returned. With `full=True`, the whole group is traversed.

```python
week1 = pf.get_group("Week 1")
```

#### `create_group(item_data)`
Creates and returns a `Group` or `ExerciseGroup` instance based on `item_data`.

```python
group = course.create_group(item_data)
```

----

## `Group`

Represents an item in Themis, which can be either a folder (non-submittable) or an assignment (submittable).

### Methods
#### `get_items()`
Returns all items (groups and assignments) under this group.

```python
items = week1.get_items()
```

#### `get_item_by_title(title)`
Returns a single item by its title (case-insensitive).

```python
item = week1.get_item_by_title("Exercise 2")
```

#### `get_status(text=False)`
Retrieves the status of the group. When `text=True`, every entry is returned as a string. Otherwise, entries that link to a submission are returned as `Submission` objects and the rest as strings.

```python
status = group.get_status()
leading_submission = status["leading"]
```

#### `download_files(path=".")`
Downloads all files available for this group to a directory `path`. Defaults to the current directory.

```python
group.download_files()
```

#### `download_tcs(path=".")`
Downloads all test cases for this group to a directory `path`. Defaults to the current directory.

```python
group.download_tcs()
```

#### `submit(files, judge=True, wait=True, silent=True)`
Submits the files to the group. Default arguments are `judge=True`, `wait=True`, and `silent=True`.

```python
group.submit(["solution.py"], silent=False)
```
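For scripted bulk submissions it can be useful not to block while the judge runs. Based on the `Group.submit` implementation added in `temmies/group.py` in this same diff, passing `wait=False` skips polling and returns the submission URL, or `None` if the redirect did not land on a submissions page. A small sketch, using the same hypothetical `group` as above:

```python
# With wait=False, Group.submit does not poll the result table; per
# temmies/group.py it returns the submission URL or None.
submission_url = group.submit(["solution.py"], wait=False, silent=True)
if submission_url is None:
    print("Submission was not registered")
else:
    print(f"Submitted, results will appear at: {submission_url}")
```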
----

## `ExerciseGroup`
Represents a submittable exercise. Inherits from `Group`.

### Additional Methods
#### `submit(files)`
Submits files to the exercise. Raises an error if the item is not submittable.

```python
exercise.submit(["solution.py"])
```

----

## `Submission`

Represents a submission for a specific exercise.

### Methods
#### `get_test_cases()`
Returns a dictionary of test cases and their statuses.

```python
test_cases = submission.get_test_cases()
```

#### `get_info()`
Returns detailed information about the submission.

```python
info = submission.get_info()
```

#### `get_files()`
Returns a list of uploaded files in the format `(name, URL)`.

```python
files = submission.get_files()
```
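Putting the pieces above together, a short end-to-end sketch. The course and group titles are the same hypothetical examples used throughout this page, and the `leading` entry is only a `Submission` when a submission link exists, so the sketch guards for that:

```python
from temmies.themis import Themis

themis = Themis("s-number")  # prompts for the password on first run
year = themis.get_year(2023, 2024)
course = year.get_course("Programming Fundamentals (for CS)")
week1 = course.get_group("Week 1")

# Inspect the submission that currently counts towards the grade, if any.
status = week1.get_status()
leading = status.get("leading")
if leading is not None and hasattr(leading, "get_test_cases"):
    print(leading.get_test_cases())  # hypothetical output: {"1": "passed", ...}
```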
docs/changelog.md (new file, 21 lines)

## **Changelog**

### **Version 1.1.0**

#### **Documentation**
- Fixed method signatures to align with actual functionality.
- Updated `get_status` to properly handle `Submission` instances.
- Ensured all class and method examples are consistent with the codebase.

#### **Codebase**
- Prepended `get_` to all methods in `Submission`.
- Created base `Group` from which `Course` and `ExerciseGroup` inherit.
- Using the system keyring to store passwords (Issue #11).

### **Version 1.2.0**

#### **Codebase**
- Moved all methods related to downloading files (including test cases) to `Group`.
- Created `get_test_cases` and `get_files` methods in `Group`.
- We are now using the [API](https://themis.housing.rug.nl/api/navigation/2023-2024) (which mysteriously appeared) to get the year/course structure (see the sketch below).
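A minimal sketch of what calling that navigation endpoint looks like from the client side. It assumes an already-authenticated `requests` session such as the one `Themis` creates, and the response layout is inferred from the fields (`path`, `title`, `submitable`) that `temmies/course.py` and `temmies/group.py` read elsewhere in this diff:

```python
from temmies.themis import Themis

themis = Themis("s-number")
resp = themis.session.get("https://themis.housing.rug.nl/api/navigation/2023-2024")
resp.raise_for_status()

# Assumption: each entry carries at least "path", "title" and "submitable",
# which is what Course.create_group() and Group consume.
for item in resp.json():
    print(item.get("title"), item.get("path"), item.get("submitable", False))
```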
docs/conf.py (removed, 27 lines)

# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = 'temmies-docs'
copyright = '2024, Boyan K.'
author = 'Boyan K.'

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = []

templates_path = ['_templates']
exclude_patterns = []



# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
docs/img/temmie.png (new binary file, 38 KiB, not shown)
docs/index.md (new file, 40 lines)

# Temmies!
<center></center>


## What is this?
A Python library which interacts with Themis. Uses bs4. I'll try to end development in a somewhat working state. [Check out the code](https://github.com/Code-For-Groningen/temmies)

## Intended Features
* Log in
* Bulk download of test cases and files
* Submitting files
* Somewhat easy to use API to interact with courses

## Installation
```bash
pip install temmies
```

## Example Usage
```python
from temmies.themis import Themis

# Log in
themis = Themis("s-number")  # You will be prompted for your password

# Get a year
year = themis.get_year("2023-2024")

# Get a course
course = year.get_course("Programming Fundamentals (for CS)")

# Get an assignment
assignment = course.get_group("Assignment 1")

# Submit 2 files
assignment.submit(["among.c", "us.py"])
```
Removed file (reStructuredText), @ -1,21 +0,0 @@

.. temmies-docs documentation master file, created by
   sphinx-quickstart on Tue Feb 13 20:53:28 2024.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Temmies!
========================================
.. image:: https://static.wikia.nocookie.net/undertale/images/7/7b/Temmie_battle_idle.gif
   :align: center
.. toctree::
   :maxdepth: 2
   :caption: Contents:



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
Removed file, @ -1 +0,0 @@
sphinx-rtd-theme==1.3.0

Binary file not shown. (Before: 141 KiB)
Binary file not shown. (Before: 190 KiB)
mkdocs.yml (new file, 7 lines)

site_name: Temmies
nav:
  - Temmies: index.md
  - API Reference: api.md
  - About: about.md
  - Change Log: changelog.md
theme: readthedocs
requirements.txt (new file, 19 lines)

beautifulsoup4==4.12.3
bs4==0.0.2
certifi==2024.8.30
cffi==1.17.1
charset-normalizer==3.4.0
cryptography==43.0.3
idna==3.10
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.8.0
keyring==25.5.0
lxml==5.3.0
more-itertools==10.5.0
pycparser==2.22
requests==2.32.3
SecretStorage==3.3.3
soupsieve==2.6
urllib3==2.2.3
setup.py (new file, 30 lines)

from setuptools import find_packages, setup

with open("README.md", "r") as f:
    l_description = f.read()

setup(
    name="temmies",
    version="1.2.1",
    packages=find_packages(),
    description="A wrapper for the Themis website",
    long_description=l_description,
    long_description_content_type="text/markdown",
    url="https://github.com/Code-For-Groningen/temmies",
    author="Boyan K.",
    author_email="boyan@confest.im",
    license="GPLv3",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Programming Language :: Python :: 3.9",
    ],
    install_requires=[
        "requests",
        "lxml",
        "beautifulsoup4",
        "keyring"
    ],
    python_requires=">=3.9",
)
Removed file, @ -1,31 +0,0 @@

# Module to handle each assignment (most difficult part)

from Downloadable import Downloadable
from Base import Base
from Exercise import Exercise
from requests import Session


class Assignment(Base):
    def __init__(self, url:str, name:str, session:Session, parent):
        super().__init__(url, name, session, parent)
        self.download = Downloadable(name, session, self)

    def __str__(self):
        return f"Assignment {self.name} in course {self.parent.name}"

    def getExercises(self) -> list[Exercise]:
        # Find li large
        ul = self.soup.find('ul', class_='round')

        # Turn each li to an exercise instance
        return self.liLargeToExercises(ul, self.session, self)

    def getExercise(self, name:str) -> Exercise:
        # Get the exercise
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        # Search by name
        exercise = soup.find('a', text=name)
        # Get the url and transform it into an exercise object
        return Exercise(url=exercise['href'], name=name, session=self.session, assignment=self)
src/Base.py (removed, 72 lines)

# Noticed there's a similar pattern in the classes, so I'm going to create a base class for them

# classes that inherit from Base:
# - Course
# - Assignment
# - Exercise
from requests import Session
from bs4 import BeautifulSoup

class Base:
    def __init__(self, url:str, name:str, session:Session, parent):
        self.url = url
        self.name = name
        self.session = session
        self.parent = parent

    def __parseCfgBlock(self, div:BeautifulSoup) -> dict:
        # We assume that the div is a submission with class "cfg-container round"
        # Put each key and value in a dictionary
        # The key is a span with a class "cfg-key"
        # The value is a span with a class "cfg-val"

        # Get the key and value spans
        keys = div.find_all('span', class_="cfg-key")
        values = div.find_all('span', class_="cfg-val")

        # Create a dictionary
        submission = {}

        # Put each key and value in the dictionary
        for i in range(len(keys)):
            submission[keys[i].text] = values[i].text

        return submission


    # TODO: Fix
    def getDownloadable(self, soup) -> list:
        # Make sure we only get the ones that have a link
        # We parse the cfg and check for the key "Downloads"
        # Check if downloads are available
        print(soup)
        cfg = soup.find('div', class_='cfg-container round')
        print(cfg)
        cfg = self.__parseCfgBlock(cfg)
        # Get the downloads
        downloads = cfg.get("Downloads", None)
        if downloads == None:
            return []
        # Get the links
        links = downloads.find_all('a')
        files = []
        for link in links:
            files.append(Base(link['href'], link.text, self.session, self))

        return files

    def getSubmissions(self):
        # We change the url where course becomes stats
        url = self.url.replace("course", "stats")
        r = self.session.get(url)

        # Get each div with class "cfg-container round"
        soup = BeautifulSoup(r.text, 'lxml')
        divs = soup.find_all('div', class_="cfg-container round")

        # The first one is an overview, the next ones are the submissions
        submissions = []
        for div in divs[1:]:
            submissions.append(self.__parseCfgBlock(div))
        return self.__parseCfgBlock(divs[0]), submissions
Removed file, @ -1,45 +0,0 @@

# Class to handle courses
from bs4 import BeautifulSoup
from requests import Session
from ExerciseGroup import ExerciseGroup
import re
from Base import Base
from exceptions.CourseUnavailable import CourseUnavailable

# PROBLEM: This implementation is bad due to inconsistencies in the website
# The way we can tell the difference between an assignment and an exercise is by the presence of an a with the class "ass-submitable"
# As opposed to folders which contain exercises which are marked with "ass-group"
# Therefore, we should take that into consideration and spawn the corresponding Exercise or Assignment class
# Naming becomes a bit inconsistent like that as well, as Assignments could be Exercises. Might opt to call the "assignments" "exerciseGroups" or some shit.

class Course(Base):
    # Extend the Base class init
    def __init__(self, url:str, name:str, session:Session, parent):
        super().__init__(url, name, session, parent)
        self.assignments = []
        self.__courseAvailable(self.session.get(self.url))

    def __str__(self):
        return f"Course {self.name} in year {self.parent.year}"

    def __courseAvailable(self, r):
        # Check if we got an error
        # print(self.url)
        if "Something went wrong" in r.text:
            raise CourseUnavailable()

    @property
    def info(self):
        return {
            "name": self.name,
            "year": self.parent.year,
            "url": self.url,
            "assignments": [x.name for x in self.assignments]
        }

    def getExerciseGroups(self):
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        section = soup.find('div', class_="ass-children")
        entries = section.find_all('a', href=True)
        return [ExerciseGroup(f"https://themis.housing.rug.nl{x['href']}", x.text, self.session, self) for x in entries]
Removed file, @ -1,45 +0,0 @@

# Since we can download files both from the assignment itself and its exercises, this class will handle both

from requests import Session
from bs4 import BeautifulSoup
from Base import Base

class Downloadable(Base):
    def __init__(self, name, session:Session, parent):
        self.name = name
        self.session = session
        self.parent = parent

    # File handling
    def __findFile(self, name:str):
        # Get the file by name
        for file in self.files:
            if file.name == name:
                return file
        return None

    @property
    def files(self) -> list:
        # Create a list of files
        # They are all links in a span with class "cfg-val"
        r = self.session.get("https://themis.housing.rug.nl" + self.parent.url)
        soup = BeautifulSoup(r.text, 'lxml')
        return self.getDownloadable(soup)

    def download(self, filename:str) -> str:
        # Download the file
        if filename == None:
            raise NameError("No filename provided")

        file = self.__findFile(filename)
        r = self.session.get(file.url, stream=True)
        with open(file.name, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
        return file.name

    def downloadAll(self) -> list[str]:
        # Download all files
        return [self.download(file.name) for file in self.files]
Removed file, @ -1,73 +0,0 @@

from Base import Base
from Downloadable import Downloadable
from requests import Session

from time import sleep


class Exercise(Base):
    def __init__(self, url:str, name:str, session:Session, parent):
        super().__init__()
        self.download = Downloadable(url, name, session, self)

    def __str__(self):
        return f"Exercise {self.name} in assignment {self.parent.name}"

    def getTests(self) -> list[str]:
        pass

    def submit(self, file:str, comment:str) -> str:
        # Submit a file
        # The form is in the page with class "cfg-container round"
        # The form is a POST request to the url with the file and the comment
        # The url looks like this: https://themis.housing.rug.nl/submit/{year}/{course}/{assignment}/{exercise}?_csrf={session_csrf}&sudo={username}
        # The current url looks like: https://themis.housing.rug.nl/course/{year}/{course}/{assignment}/{exercise}
        # The request should contain the contents of the file

        # Get the url
        url = self.url.replace("course", "submit")
        # Get the csrf token
        csrf = self.session.cookies['_csrf']
        # Get the username
        username = self.session.cookies['username']

        # Open the file
        with open(file, 'rb') as f:
            # Submit the file
            # After submission it will 302 to the current submission page
            r = self.session.post(url, files={'file': f}, data={'comment': comment, '_csrf': csrf, 'sudo': username})

        # Follow the redirect and repeatedly send get requests to the page

        # We have a table which represents the test cases. The program should wait until all the test cases are done
        # The test case is done when all of the elements in the table are not none
        # The element which showcases this for each <tr class="sub-casetop">
        # is the class in there. if it is "queued" it is still running.

        # Get the url
        url = r.url
        # Get the page
        r = self.session.get(url)
        # Get the soup
        soup = BeautifulSoup(r.text, 'lxml')
        # Get the table
        table = soup.find('table')
        # Get the rows
        rows = table.find_all('tr', class_='sub-casetop')
        # Get the status
        status = [row.find('td', class_='status').text for row in rows]
        # Wait until all the status are not queued
        while "queued" in status:
            # Wait a bit
            sleep(1)
            # Get the page
            r = self.session.get(url)
            # Get the soup
            soup = BeautifulSoup(r.text, 'lxml')
            # Get the table
            table = soup.find('table')
            # Get the rows
            rows = table.find_all('tr', class_='sub-casetop')


        pass
Removed file, @ -1,39 +0,0 @@

from Base import Base
from bs4 import BeautifulSoup\

class ExerciseGroup(Base):
    # I can't tell if I'm already an exercise :C

    def __init__(self, url:str, name:str, session, parent):
        super().__init__(url, name, session, parent)
        self.exercises = self.getExercises()
        self.folders = self.getFolders()

    def __str__(self):
        return f"ExerciseGroup {self.name} in course {self.parent.name}"

    def getExercises(self) -> list:
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        section = soup.find('div', class_="ass-children")
        try:
            submittables = section.find_all('a', class_="ass-submitable")
        except AttributeError:
            return None

        return submittables

    # Returns a list of names of the folders
    def getFolders(self) -> list:
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        section = soup.find('div', class_="ass-children")
        try:
            folders = section.find_all('a', class_="ass-group")
        except AttributeError:
            return None

        return [x.text for x in folders]

    def recurse(self, folder:str):
        print(self.url)
Removed file, @ -1,62 +0,0 @@

from Year import Year
import urllib3
from requests import Session
from bs4 import BeautifulSoup

# Disable warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

class Themis:
    def __init__(self,user:str, passwd:str):
        self.session = self.login(user,passwd)
        self.years = []
        self.url = "https://themis.housing.rug.nl/course/"

    def login(self, user, passwd):
        headers = {
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chromium/80.0.3987.160 Chrome/80.0.3987.163 Safari/537.36"
        }

        data = {
            "user": user,
            "password":passwd,
            "null": None
        }

        with Session() as s:
            url = 'https://themis.housing.rug.nl/log/in'
            r = s.get(url,headers=headers,verify=False)
            soup = BeautifulSoup(r.text, 'lxml')

            # get the csrf token and add it to payload
            csrfToken = soup.find('input',attrs = {'name':'_csrf'})['value']
            data['_csrf'] = csrfToken

            # Login
            r = s.post(url,data=data,headers = headers)

            # check if login was successful
            log_out = "Welcome, logged in as" in r.text
            if not log_out:
                raise Exception(f"Login for user {user} failed")

        return s


    def getYear(self, start:int, end:int):
        # Get the current year
        return Year(self.session, self, start, end)

    def allYears(self):
        # All of them are in a big ul at the beginning of the page
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        ul = soup.find('ul', class_='round')
        lis = ul.find_all('li', class_='large')
        years = []
        for li in lis:
            # format: 2019-2020
            year = li.a.text.split("-")
            years.append(Year(self.session, self, int(year[0]), int(year[1])))

        return years # Return a list of year objects
src/Year.py (removed, 55 lines)

# Year class to represent an academic year

from bs4 import BeautifulSoup
from Course import Course
from requests import Session
from exceptions.CourseUnavailable import CourseUnavailable

# Works
class Year:
    def __init__(self, session:Session, parent, start_year:int, end_year:int):
        self.start = start_year
        self.year = end_year
        self.session = session
        self.url = self.__constructUrl()

    # Method to set the url
    def __constructUrl(self):
        return f"https://themis.housing.rug.nl/course/{self.start}-{self.year}"

    # Method to get the courses of the year
    def getCourses(self, errors:bool=False) -> list[Course]:
        # lis in a big ul
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        lis = soup.find_all('li', class_='large')
        courses = []
        for li in lis:
            try:
                suffix = (li.a['href'].replace(f"course/{self.start}-{self.year}", ""))
                courses.append(
                    Course(
                        self.url + suffix,
                        li.a.text,
                        self.session,
                        self
                    )
                )
            except CourseUnavailable:
                if errors:
                    raise CourseUnavailable(f"Course {li.a.text} in year {self.start}-{self.year} is not available")
                else:
                    print("error with course", li.a.text)
                    continue

        return courses

    def getCourse(self, name:str) -> Course:
        # Get the course
        r = self.session.get(self.url)
        soup = BeautifulSoup(r.text, 'lxml')
        # Search by name
        course = self.url + soup.find('a', text=name)['href'].replace(f"course/{self.start}-{self.year}", "")
        # Get the url and transform it into a course object
        return Course(url=course, name=name, session=self.session, parent=self)
Removed file, @ -1,4 +0,0 @@

class CourseUnavailable(Exception):
    def __init__(self, message:str="Error in course"):
        self.message = message
        super().__init__(self.message)
temmies/__init__.py (new file, 5 lines)

from .themis import Themis
import urllib3

__all__ = ["Themis"]
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
temmies/course.py (new file, 35 lines)

from .group import Group
from .exercise_group import ExerciseGroup  # needed by create_group() below


class Course(Group):
    """
    Represents a course.
    """

    def __init__(self, session, course_path: str, title: str, parent):
        super().__init__(session, course_path, title, parent)
        self.course_path = course_path  # e.g., '/2023-2024/adinc-ai'

    def __str__(self):
        return f"Course({self.title})"

    def create_group(self, item_data):
        """
        Create a subgroup (Group or ExerciseGroup) based on item data.
        """
        if item_data.get("submitable", False):
            return ExerciseGroup(
                self.session,
                item_data["path"],
                item_data["title"],
                self,
                item_data.get("submitable", False),
            )
        else:
            return Group(
                self.session,
                item_data["path"],
                item_data["title"],
                self,
                item_data.get("submitable", False),
            )
temmies/exceptions/__init__.py (new empty file)

temmies/exceptions/course_unavailable.py (new file, 6 lines)

""" This module contains the CourseUnavailable exception. """

class CourseUnavailable(Exception):
    """CourseUnavailable Exception"""
    def __init__(self, message: str = ""):
        super().__init__(f"Course unavailable: {message}")

temmies/exceptions/illegal_action.py (new file, 8 lines)

"""
Illegal Action Exception
"""

class IllegalAction(Exception):
    """Illegal Action Exception"""
    def __init__(self, message: str = ""):
        super().__init__(f"Illegal action: {message}")
temmies/exercise_group.py (new file, 51 lines)

from .group import Group
from .submission import Submission
from bs4 import BeautifulSoup

class ExerciseGroup(Group):
    """
    Represents a submittable exercise.
    """

    def __init__(self, session, path: str, title: str, parent, submitable: bool = True):
        super().__init__(session, path, title, parent, submitable=submitable)
        self.submit_url = f"{self.base_url}/api/submit{self.path}"
        self.__find_name()

    def __find_name(self):
        """
        Find the name of the exercise group.
        """
        if self.title == "":
            # Find using beautiful soup (it is the last a with class 'fill accent large')
            response = self.session.get(self.base_url + self.path)
            soup = BeautifulSoup(response.text, "lxml")
            title_elements = soup.find_all("a", class_="fill accent large")
            if title_elements:
                self.title = title_elements[-1].get_text(strip=True)
            else:
                self.title = self.path.split("/")[-1]

    def submit(self, files: list[str]) -> Submission:
        """
        Submit files to this exercise.
        """
        if not self.submitable:
            raise ValueError(f"Cannot submit to non-submittable item '{self.title}'.")

        # Prepare the files and data for submission
        files_payload = {}
        for idx, file_path in enumerate(files):
            file_key = f"file{idx}"
            with open(file_path, "rb") as f:
                files_payload[file_key] = (file_path, f.read())

        response = self.session.post(self.submit_url, files=files_payload)
        if response.status_code != 200:
            raise ConnectionError(f"Failed to submit to '{self.title}'.")

        submission_data = response.json()
        return Submission(self.session, submission_data)

    def __str__(self):
        return f"ExerciseGroup({self.title})"
temmies/group.py (new file, 318 lines)

from json import loads  # needed for parsing the form's "data-suffixes" in submit()
from time import sleep  # needed for polling in __parse_table()
from bs4 import BeautifulSoup
from requests import Session
import os
from typing import Optional, Union, Dict
from .exceptions.illegal_action import IllegalAction
from .submission import Submission

class Group:
    """
    Represents an item in Themis, which can be either a folder (non-submittable) or an assignment (submittable).
    """

    def __init__(self, session, path: str, title: str, parent=None, submitable: bool = False):
        self.session = session
        self.path = path  # e.g., '/2023-2024/adinc-ai/labs'
        self.title = title
        self.parent = parent
        self.submitable = submitable
        self.base_url = "https://themis.housing.rug.nl"
        self.api_url = f"{self.base_url}/api/navigation{self.path}"
        self.classes = []

        # Adjust URL construction to include '/course' when accessing HTML pages
        if not self.path.startswith('/course/'):
            group_url = f"{self.base_url}/course{self.path}"
        else:
            group_url = f"{self.base_url}{self.path}"

        # Fetch the page and parse it
        response = self.session.get(group_url)
        if response.status_code != 200:
            raise ConnectionError(f"Failed to retrieve page for '{self.title}'. Tried {group_url}")
        self._raw = BeautifulSoup(response.text, "lxml")

    def get_items(self) -> list:
        """
        Get all items (groups and assignments) under this group.
        """
        section = self._raw.find("div", class_="ass-children")
        if not section:
            return []

        entries = section.find_all("a", href=True)
        items = []
        for x in entries:
            href = x['href']
            name = x.text.strip()
            classes = x.get('class', [])
            submitable = "ass-submitable" in classes
            item = Group(
                session=self.session,
                path=href,
                title=name,
                parent=self,
                submitable=submitable
            )
            items.append(item)
        return items

    def get_item_by_title(self, title: str):
        """
        Get a single item by its title, case-insensitive.
        """
        items = self.get_items()
        for item in items:
            if (item.title.lower() == title.lower()) or (item.path.split("/")[-1] == title):
                return item
        raise ValueError(f"Item '{title}' not found under {self.title}.")

    def get_status(self, text: bool = False) -> Union[Dict[str, Union[str, 'Submission']], None]:
        """
        Get the status of the current group, if available.
        """
        status_link = self._raw.find("a", text="Status")
        if not status_link:
            raise ValueError("Status information is not available for this group.")

        status_url = f"{self.base_url}{status_link['href']}"
        response = self.session.get(status_url)
        if response.status_code != 200:
            raise ConnectionError(f"Failed to retrieve status page for '{self.title}'.")

        soup = BeautifulSoup(response.text, "lxml")
        section = soup.find("div", class_="cfg-container")

        if not section:
            return None

        return self.__parse_status_section(section, text)

    def __parse_status_section(self, section: BeautifulSoup, text: bool) -> Dict[str, Union[str, 'Submission']]:
        """
        Parse the status section of the group and clean up keys.
        """
        key_mapping = {
            "leading the submission that counts towards the grade": "leading",
            "best the latest submission with the best result": "best",
            "latest the most recent submission": "latest",
            "first pass the first submission that passed": "first_pass",
            "last pass the last submission to pass before the deadline": "last_pass",
        }

        parsed = {}
        cfg_lines = section.find_all("div", class_="cfg-line")
        for line in cfg_lines:
            key_element = line.find("span", class_="cfg-key")
            value_element = line.find("span", class_="cfg-val")
            if not key_element or not value_element:
                continue

            # Normalize key
            raw_key = " ".join(key_element.get_text(separator=" ").strip().replace(":", "").lower().split())
            key = key_mapping.get(raw_key, raw_key)  # Use mapped key if available

            # Process value
            link = value_element.find("a", href=True)
            if link and not text:
                href = link["href"]
                # Construct full URL
                if href.startswith("/"):
                    submission_url = href
                elif href.startswith("http"):
                    submission_url = href.replace("https://themis.housing.rug.nl", "")
                else:
                    print(f"Invalid href '{href}' found in status page.")
                    continue  # Skip this entry if href is invalid

                # Instantiate Submission with submission_url and session
                submission = Submission(submission_url, self.session)
                parsed[key] = submission
            else:
                parsed[key] = value_element.get_text(separator=" ").strip()

        return parsed

    def get_test_cases(self) -> list[Dict[str, str]]:
        """
        Get all test cases for this assignment.
        """
        if not self.submitable:
            raise ValueError(f"No test cases for non-submittable item '{self.title}'.")

        sections = self._raw.find_all("div", class_="subsec round shade")
        tcs = []
        for div in sections:
            res = div.find("h4", class_="info")
            if res and "Test cases" in res.text:
                for case in div.find_all("div", class_="cfg-line"):
                    link = case.find("a")
                    if link:
                        tcs.append({
                            'title': link.text.strip(),
                            'path': link['href']
                        })
        return tcs

    def download_tcs(self, path=".") -> list[str]:
        """
        Download all test cases for this assignment.
        """
        test_cases = self.get_test_cases()
        downloaded = []
        for tc in test_cases:
            url = f"{self.base_url}{tc['path']}"
            print(f"Downloading {tc['title']}")
            response = self.session.get(url)
            if response.status_code == 200:
                tc_filename = os.path.join(path, tc['title'])
                with open(tc_filename, 'wb') as f:
                    f.write(response.content)
                downloaded.append(tc_filename)
            else:
                print(f"Failed to download test case '{tc['title']}'")
        return downloaded

    def get_files(self) -> list[Dict[str, str]]:
        """
        Get all downloadable files for this assignment.
        """
        details = self._raw.find("div", id=lambda x: x and x.startswith("details"))
        if not details:
            return []

        cfg_lines = details.find_all("div", class_="cfg-line")
        files = []

        for line in cfg_lines:
            key = line.find("span", class_="cfg-key")
            if key and "Downloads" in key.text.strip():
                vals = line.find_all("span", class_="cfg-val")
                for val in vals:
                    links = val.find_all("a")
                    for link in links:
                        files.append({
                            'title': link.text.strip(),
                            'path': link['href']
                        })
        return files

    def download_files(self, path=".") -> list[str]:
        """
        Download all files available for this assignment.
        """
        files = self.get_files()
        downloaded = []
        for file in files:
            print(f"Downloading file '{file['title']}'")
            url = f"{self.base_url}{file['path']}"
            response = self.session.get(url)
            if response.status_code == 200:
                file_filename = os.path.join(path, file['title'])
                with open(file_filename, 'wb') as f:
                    f.write(response.content)
                downloaded.append(file_filename)
            else:
                print(f"Failed to download file '{file['title']}'")
        return downloaded

    def submit(self, files: list[str], judge: bool = True, wait: bool = True, silent: bool = True) -> Optional[dict]:
        """
        Submit files to this assignment.
        Returns a dictionary of test case results or None if wait is False.
        """
        if not self.submitable:
            raise ValueError(f"Cannot submit to non-submittable item '{self.title}'.")

        form = self._raw.find("form")
        if not form:
            raise ValueError("Submission form not found.")

        url = f"{self.base_url}{form['action']}"
        file_types = loads(form.get("data-suffixes", "{}"))

        if isinstance(files, str):
            files = [files]

        packaged_files = []
        data = {}
        found_type = ""

        for file in files:
            for suffix, lang in file_types.items():
                if file.endswith(suffix):
                    found_type = lang
                    break
            if not found_type:
                print("WARNING: File type not recognized")

            with open(file, "rb") as f:
                packaged_files.append((found_type, (file, f.read())))

        data = {
            "judgenow": "true" if judge else "false",
            "judgeLanguage": found_type if found_type else "none"
        }

        if not silent:
            print(f"Submitting to {self.title}")
            for file in files:
                print(f"• {file}")

        resp = self.session.post(url, files=packaged_files, data=data)

        if not wait or not judge:
            return resp.url if "@submissions" in resp.url else None

        return self.__wait_for_result(resp.url, not silent, [])

    def __wait_for_result(self, url: str, verbose: bool, __printed: list) -> dict:
        """
        Wait for the submission result and return the test case results.
        """
        r = self.session.get(url)
        soup = BeautifulSoup(r.text, "lxml")
        return self.__parse_table(soup, url, verbose, __printed)

    def __parse_table(self, soup: BeautifulSoup, url: str, verbose: bool, __printed: list) -> dict:
        """
        Parse the results table from the submission result page.
        """
        cases = soup.find_all("tr", class_="sub-casetop")
        fail_pass = {}
        for case in cases:
            name = case.find("td", class_="sub-casename").text
            status = case.find("td", class_="status-icon")

            if "pending" in status.get("class"):
                sleep(1)
                return self.__wait_for_result(url, verbose, __printed)

            statuses = {
                "Passed": ("✅", True),
                "Wrong output": ("❌", False),
                "No status": ("🐛", None),
                "error": ("🐛", None),
            }

            found = False
            for k, v in statuses.items():
                if k in status.text:
                    found = True
                    if verbose and int(name) not in __printed:
                        print(f"{name}: {v[0]}")
                    fail_pass[int(name)] = v[1]
                    break
            if not found:
                fail_pass[int(name)] = None
                if verbose and int(name) not in __printed:
                    print(f"{name}: Unrecognized status: {status.text}")

            __printed.append(int(name))
        return fail_pass

    def __str__(self):
        return f"Group({self.title}, submitable={self.submitable})"
94
temmies/submission.py
Normal file
94
temmies/submission.py
Normal file
@ -0,0 +1,94 @@
# submission.py

"""
File to define the Submission class
"""

from bs4 import BeautifulSoup


class Submission:
    """
    Submission class

    Methods:
        get_test_cases: Get a dict of test cases status
        get_info: Submission information (in details)
        get_files: Get a list of uploaded files (as (name, url) tuples)
    """

    def __init__(self, url: str, session):
        self.url = "https://themis.housing.rug.nl" + url
        self.__session = session
        self.__request = self.__session.get(self.url)
        self.__raw = BeautifulSoup(self.__request.text, "lxml")
        self.__info = None

    def __clean(self, text: str, value: bool = False) -> str:
        """Clean text"""
        clean = text.replace("\t", "").replace("\n", "")
        if value:
            return clean.strip()
        return clean.replace(" ", "_").replace(":", "").lower().strip()

    def get_test_cases(self) -> dict[str, str]:
        """Get a dict of test cases status"""
        cases = self.__raw.find("div", class_=lambda x: x and "sub-cases" in x.split())
        if not cases:
            return {}

        cases = cases.find("div", class_="cfg-container")
        cases = cases.find("table")

        results = {}
        for entry in cases.find_all("tr", class_="sub-casetop"):
            name = entry.find("td", class_="sub-casename").text
            status = entry.find(
                "td", class_=lambda x: x and "status-icon" in x.split()
            ).text
            results[name.strip()] = self.__clean(status)

        return results

    def get_info(self) -> dict[str, str] | None:
        """Submission information (in details)"""
        if self.__info:
            return self.__info

        for div in self.__raw.find_all("div", class_="subsec round shade"):
            h4 = div.find("h4", class_=lambda x: x and "info" in x.split())
            if h4 and "Details" in h4.text:
                info = div.find("div", class_="cfg-container")
                info_lines = info.find_all("div", class_="cfg-line")
                self.__info = {
                    self.__clean(
                        key := line.find("span", class_="cfg-key").text
                    ): (
                        self.__clean(line.find("span", class_="cfg-val").text, value=True)
                        if "Files" not in key
                        else [
                            (self.__clean(a.text), a["href"])
                            for a in line.find("span", class_="cfg-val").find_all("a")
                        ]
                    )
                    for line in info_lines
                }
                return self.__info
        return None

    def get_files(self) -> list[str] | None:
        """Get a list of uploaded files in the format [(name, url)]"""
        if not self.__info:
            self.__info = self.get_info()
        if not self.__info:
            return None
        return self.__info.get("files", None)

    # Deprecated methods
    def info(self):
        print("This method is deprecated and will be deleted soon. Use get_info instead.")
        return self.get_info()

    def test_cases(self):
        print("This method is deprecated and will be deleted soon. Use get_test_cases instead.")
        return self.get_test_cases()

    def files(self):
        print("This method is deprecated and will be deleted soon. Use get_files instead.")
        return self.get_files()
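A brief usage sketch for the class above. Here `session` is assumed to be an authenticated requests.Session (in this library it normally comes from the Themis class further down), and the submission path is a made-up placeholder:

# Sketch only: `session` and the submission path are assumptions, not real values.
from temmies.submission import Submission

sub = Submission("/2023-2024/example-course/assignment/submission/1", session)
print(sub.get_test_cases())   # e.g. {"1": "passed", "2": "wrong_output"}
print(sub.get_info())         # dict of cleaned "Details" keys -> values, or None
print(sub.get_files())        # value of the "files" entry: [(name, url), ...], or None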
116	temmies/themis.py	Normal file
@@ -0,0 +1,116 @@
"""
Main class for the Themis API using the new JSON endpoints.
"""

import keyring
import getpass
from requests import Session
from bs4 import BeautifulSoup
from .year import Year
from .exceptions.illegal_action import IllegalAction


class Themis:
    """
    Main class for interacting with Themis.
    - login: Login to Themis
    - get_year: Get a year object
    - all_years: Get all years
    """

    def __init__(self, user: str):
        """
        Initialize Themis object, logging in with the given user.

        Args:
            user (str): Username to login with.

        Attributes:
            user (str): Username.
            password (str): Password, retrieved from keyring.
            base_url (str): Base URL of the Themis website.
            session (requests.Session): Authenticated session.
        """
        self.user = user
        self.password = self.__get_password()
        self.base_url = "https://themis.housing.rug.nl"
        self.session = self.login(self.user, self.password)

    def __get_password(self) -> str:
        """
        Retrieve the password from the keyring, prompting the user if not found.
        """
        password = keyring.get_password(f"{self.user}-temmies", self.user)
        if not password:
            print(f"Password for user '{self.user}' not found in keyring.")
            password = getpass.getpass(prompt=f"Enter password for {self.user}: ")
            keyring.set_password(f"{self.user}-temmies", self.user, password)
            print("Password saved securely in keyring.")
        return password

    def login(self, user: str, passwd: str) -> Session:
        """
        Login to Themis using the original method, parsing the CSRF token from the login page.
        """
        session = Session()
        login_url = f"{self.base_url}/log/in"

        user_agent = (
            "Mozilla/5.0 (X11; Linux x86_64) "
            "AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chromium/80.0.3987.160 Chrome/80.0.3987.163 Safari/537.36"
        )

        headers = {"user-agent": user_agent}

        data = {"user": user, "password": passwd, "null": None}

        # Get login page to retrieve CSRF token
        response = session.get(login_url, headers=headers, verify=False)
        if response.status_code != 200:
            raise ConnectionError("Failed to connect to Themis login page.")

        # Parse CSRF token from login page
        soup = BeautifulSoup(response.text, "lxml")
        csrf_input = soup.find("input", attrs={"name": "_csrf"})
        if not csrf_input or not csrf_input.get("value"):
            raise ValueError("Unable to retrieve CSRF token.")
        csrf_token = csrf_input["value"]
        data["_csrf"] = csrf_token
        data["sudo"] = user.lower()

        # Attempt login
        response = session.post(login_url, data=data, headers=headers)
        if "Invalid credentials" in response.text:
            # Prompt for password again
            print("Invalid credentials. Please try again.")
            passwd = getpass.getpass(prompt="Enter password: ")
            keyring.set_password(f"{self.user}-temmies", self.user, passwd)
            return self.login(user, passwd)
        elif "Welcome, logged in as" not in response.text:
            raise ValueError("Login failed for an unknown reason.")

        return session

    def get_year(self, start_year: int = None, end_year: int = None) -> Year:
        """
        Gets a Year object using the year path (e.g., 2023, 2024).
        """
        year_path = f"{start_year}-{end_year}"
        return Year(self.session, year_path)

    def all_years(self) -> list:
        """
        Gets all visible years as Year objects.
        """
        navigation_url = f"{self.base_url}/api/navigation/"
        response = self.session.get(navigation_url)
        if response.status_code != 200:
            raise ConnectionError("Failed to retrieve years from Themis API.")

        years_data = response.json()
        years = []
        for year_info in years_data:
            if year_info.get("visible", False):
                year_path = year_info["path"].strip("/")
                years.append(Year(self.session, year_path))
        return years
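A short usage sketch for the class above; the username is a placeholder, and on first use the password is requested via getpass and cached in the system keyring:

# Sketch only: "s1234567" is a placeholder username.
from temmies.themis import Themis

themis = Themis("s1234567")
year = themis.get_year(2023, 2024)   # Year for the "2023-2024" path
for y in themis.all_years():         # every visible year from the navigation API
    print(y)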
68	temmies/year.py	Normal file
@@ -0,0 +1,68 @@
from .course import Course
from bs4 import BeautifulSoup


class Year:
    """
    Represents an academic year.
    """

    def __init__(self, session, year_path: str):
        self.session = session
        self.year_path = year_path  # e.g., '2023-2024'
        self.base_url = "https://themis.housing.rug.nl"
        self.api_url = f"{self.base_url}/api/navigation/{self.year_path}"

    def all_courses(self) -> list:
        """
        Gets all visible courses in this year.
        """
        response = self.session.get(self.api_url)
        if response.status_code != 200:
            raise ConnectionError(f"Failed to retrieve courses for {self.year_path}.")

        courses_data = response.json()
        courses = []
        for course_info in courses_data:
            if course_info.get("visible", False):
                course_path = course_info["path"]
                course_title = course_info["title"]
                courses.append(Course(self.session, course_path, course_title, self))
        return courses

    def get_course(self, course_title: str) -> Course:
        """
        Gets a course by its title.
        """
        all_courses = self.all_courses()
        for course in all_courses:
            if course.title == course_title:
                return course
        raise ValueError(f"Course '{course_title}' not found in year {self.year_path}.")

    def get_course_by_tag(self, course_tag: str) -> Course:
        """
        Gets a course by its tag (course identifier).
        Constructs the course URL using the year and course tag.
        """
        course_path = f"/{self.year_path}/{course_tag}"
        course_url = f"{self.base_url}/course{course_path}"

        response = self.session.get(course_url)
        if response.status_code != 200:
            raise ConnectionError(
                f"Failed to retrieve course with tag '{course_tag}' for year {self.year_path}. "
                f"Tried {course_url}"
            )

        soup = BeautifulSoup(response.text, "lxml")

        title_elements = soup.find_all("a", class_="fill accent large")
        title_element = title_elements[-1] if title_elements else None

        if title_element:
            course_title = title_element.get_text(strip=True)
        else:
            raise ValueError(
                f"Could not retrieve course title for tag '{course_tag}' in year {self.year_path}."
            )

        return Course(self.session, course_path, course_title, self)

    def __str__(self):
        return f"Year({self.year_path})"
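To round out the picture, a hedged sketch of how a Year object is used directly; `session` is assumed to be an authenticated requests.Session (see the Themis class above), and the course tag is a placeholder:

# Sketch only: `session` and "example-course" are assumptions, not real values.
from temmies.year import Year

year = Year(session, "2023-2024")
print([c.title for c in year.all_courses()])       # visible courses via the JSON navigation API
course = year.get_course_by_tag("example-course")  # scrapes the course page to recover its title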