Mirror of https://gitea.publichub.eu/oscar.krause/fastapi-dls.git (synced 2025-11-25 12:16:12 +00:00)

Compare commits (107 commits)
Commits in this comparison (SHA1):

699dbf6fac, 317699ff58, 55446f7d9c, 88c78efcd9, fb3ac4291f, 15f14cac11, 018d7c34fc, 1aee423120, a6b2f2a942, e33024db86,
4ad15f0849, 7bad0359af, 59a7c9f15a, bc6d692f0a, 63c37c6334, fa2c06972e, e4e6387b2a, f2be9dca8d, 52dd425583, 286399d79a,
4ab1a2ed22, 459c0e21af, 98ef64211b, 0b4bb65546, 47624f5019, 2b9d7821c0, 45f5108717, a7fe8b867e, 78214df9cc, 4245d5a582,
9b5a387169, 9377d5ce28, 7489307db8, d41314e81d, a1123d5451, 93cf719454, 0dc8f6c582, 4b0219b85a, 8edbb25c16, 49a24f0b68,
8af3c8e2b3, 3c321a202c, 1b7d8bc0dc, 23ccea538f, c79455b84d, 35fc5ea6b0, 6a54c05fbb, c9ac915055, 8c5850beda, 0d9e814d0d,
5438317fb7, 21f19be8ab, eff6aae25d, 6473655e57, c45aa1a2a8, 1d0631417d, 847d3589c5, ca53a4e084, 006d3a1833, ad3b622c23,
e51d6bd391, 78c1978dd5, 4ebb4d790e, 11f1456538, be6797efc7, 42fe066e1a, 9eb91cbe1a, 395884f643, ef542ec821, 254e4ee08c,
07273c3ebd, e04723d128, 8f498f4960, dd69f60fd0, a5d599a52c, 66d203e72a, 7800bf73a8, 5b39598487, ed59260a10, 7c70d121be,
213e768708, 0696900d67, 4fb90a22e3, 6aa197dcae, 46f6c9fe99, 2baaeb561b, 867cd7018a, 9c686913dd, d3c4dc3fb7, af8b1c2387,
d37d96dc34, 21d052523f, 22110df791, c7f354d50c, 3bdfc94527, 9473f10653, e9ad1d7791, f97ee9c8fc, 236948e483, 948934ad0e,
3ef14e5522, ee50ede2ea, b11579de98, dc33c29158, 6f9107087b, 01fd954252, 995dbdac80
.DEBIAN/requirements-bookworm-12.txt (new file, 11 lines)
@@ -0,0 +1,11 @@
# https://packages.debian.org/hu/
fastapi==0.92.0
uvicorn[standard]==0.17.6
python-jose[pycryptodome]==3.3.0
pycryptodome==3.11.0
python-dateutil==2.8.2
sqlalchemy==1.4.46
markdown==3.4.1
python-dotenv==0.21.0
jinja2==3.1.2
httpx==0.23.3

.DEBIAN/requirements-ubuntu-24.04.txt (new file, 10 lines)
@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.101.0
uvicorn[standard]==0.27.1
python-jose[pycryptodome]==3.3.0
pycryptodome==3.20.0
python-dateutil==2.8.2
sqlalchemy==1.4.50
markdown==3.5.2
python-dotenv==1.0.1
jinja2==3.1.2

.DEBIAN/requirements-ubuntu-24.10.txt (new file, 10 lines)
@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.110.3
uvicorn[standard]==0.30.3
python-jose[pycryptodome]==3.3.0
pycryptodome==3.20.0
python-dateutil==2.9.0
sqlalchemy==2.0.32
markdown==3.6
python-dotenv==1.0.1
jinja2==3.1.3
@@ -12,7 +12,7 @@ depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastap
provider=("$pkgname")
install="$pkgname.install"
backup=('etc/default/fastapi-dls')
source=('git+file:///builds/oscar.krause/fastapi-dls' # https://gitea.publichub.eu/oscar.krause/fastapi-dls.git
source=("git+file://${CI_PROJECT_DIR}"
        "$pkgname.default"
        "$pkgname.service"
        "$pkgname.tmpfiles")
@@ -48,6 +48,7 @@ package() {
    install -Dm755 "$srcdir/$pkgname/app/main.py" "$pkgdir/opt/$pkgname/main.py"
    install -Dm755 "$srcdir/$pkgname/app/orm.py" "$pkgdir/opt/$pkgname/orm.py"
    install -Dm755 "$srcdir/$pkgname/app/util.py" "$pkgdir/opt/$pkgname/util.py"
    install -Dm755 "$srcdir/$pkgname/app/middleware.py" "$pkgdir/opt/$pkgname/middleware.py"
    install -Dm644 "$srcdir/$pkgname.default" "$pkgdir/etc/default/$pkgname"
    install -Dm644 "$srcdir/$pkgname.service" "$pkgdir/usr/lib/systemd/system/$pkgname.service"
    install -Dm644 "$srcdir/$pkgname.tmpfiles" "$pkgdir/usr/lib/tmpfiles.d/$pkgname.conf"
@@ -1,7 +1,9 @@
version: "2"
plugins:
  bandit:
    enabled: true
  sonar-python:
    enabled: true
  pylint:
    enabled: true
    config:
      tests_patterns:
        - test/**
@@ -20,6 +20,7 @@ build:docker:
      changes:
        - app/**/*
        - Dockerfile
        - requirements.txt
    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
  tags: [ docker ]
  before_script:
@@ -126,16 +127,35 @@ build:pacman:
      - "*.pkg.tar.zst"

test:
  image: python:3.11-slim-bullseye
  image: $IMAGE
  stage: test
  interruptible: true
  rules:
    - if: $CI_COMMIT_BRANCH
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_TAG
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
      changes:
        - app/**/*
        - test/**/*
  variables:
    DATABASE: sqlite:///../app/db.sqlite
  parallel:
    matrix:
      - IMAGE: [ 'python:3.12-slim-bookworm' ]
        REQUIREMENTS: [ 'requirements.txt' ]
      - IMAGE: [ 'debian:bookworm' ] # EOL: June 06, 2026
        REQUIREMENTS: [ '.DEBIAN/requirements-bookworm-12.txt' ]
      - IMAGE: [ 'ubuntu:24.04' ] # EOL: April 2036
        REQUIREMENTS: [ '.DEBIAN/requirements-ubuntu-24.04.txt' ]
      - IMAGE: [ 'ubuntu:24.10' ]
        REQUIREMENTS: [ '.DEBIAN/requirements-ubuntu-24.10.txt' ]
  before_script:
    - pip install -r requirements.txt
    - apt-get update && apt-get install -y python3-dev python3-pip python3-venv gcc
    - python3 -m venv venv
    - source venv/bin/activate
    - pip install --upgrade pip
    - pip install -r $REQUIREMENTS
    - pip install pytest httpx
    - mkdir -p app/cert
    - openssl genrsa -out app/cert/instance.private.pem 2048
@@ -148,7 +168,7 @@ test:
    dotenv: version.env
    junit: ['**/report.xml']

.test:linux:
.test:apt:
  stage: test
  rules:
    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
@@ -187,15 +207,15 @@ test:
    - apt-get purge -qq -y fastapi-dls
    - apt-get autoremove -qq -y && apt-get clean -qq

test:debian:
  extends: .test:linux
test:apt:debian:
  extends: .test:apt
  image: debian:bookworm-slim

test:ubuntu:
  extends: .test:linux
  image: ubuntu:23.04
test:apt:ubuntu:
  extends: .test:apt
  image: ubuntu:24.04

test:archlinux:
test:pacman:archlinux:
  image: archlinux:base
  rules:
    - if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
@@ -211,10 +231,13 @@ test:archlinux:
    - pacman -U --noconfirm *.pkg.tar.zst

code_quality:
  variables:
    SOURCE_CODE: app
  rules:
    - if: $CODE_QUALITY_DISABLED
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

secret_detection:
  rules:
@@ -229,12 +252,25 @@ semgrep-sast:
    - if: $SAST_DISABLED
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

test_coverage:
  extends: test
  # extends: test
  image: python:3.12-slim-bookworm
  allow_failure: true
  stage: test
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
  variables:
    DATABASE: sqlite:///../app/db.sqlite
  before_script:
    - apt-get update && apt-get install -y python3-dev gcc
    - pip install -r requirements.txt
    - pip install pytest httpx
    - mkdir -p app/cert
    - openssl genrsa -out app/cert/instance.private.pem 2048
    - openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
    - cd test
  script:
    - pip install pytest pytest-cov
    - coverage run -m pytest main.py
@@ -259,6 +295,7 @@ gemnasium-python-dependency_scanning:
    - if: $DEPENDENCY_SCANNING_DISABLED
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

.deploy:
  rules:
@@ -1,4 +1,4 @@
FROM python:3.11-alpine
FROM python:3.12-alpine

ARG VERSION
ARG COMMIT=""
@@ -10,7 +10,7 @@ RUN apk update \
    && apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev pkgconfig \
    && apk add --no-cache curl postgresql postgresql-dev mariadb-dev sqlite-dev \
    && pip install --no-cache-dir --upgrade uvicorn \
    && pip install --no-cache-dir psycopg2==2.9.6 mysqlclient==2.2.0 pysqlite3==0.5.1 \
    && pip install --no-cache-dir psycopg2==2.9.10 mysqlclient==2.2.6 pysqlite3==0.5.4 \
    && pip install --no-cache-dir -r /tmp/requirements.txt \
    && apk del build-deps
README.md (164 changed lines)
@@ -2,22 +2,32 @@

Minimal Delegated License Service (DLS).

Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0. For Driver compatibility see [here](#setup-client).
Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1. For Driver compatibility
see [compatibility matrix](#vgpu-software-compatibility-matrix).

This service can be used without internet connection.
Only the clients need a connection to this service on configured port.

**Official Links**

- https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
- https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
- https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
* https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
* https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
* https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)

*All other repositories are forks! (which is no bad - just for information and bug reports)*

[Releases & Release Notes](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/releases)

**Further Reading**

* [NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox) - This document serves as a guide to install NVIDIA vGPU host drivers on the latest Proxmox VE version
* [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock) - Unlock vGPU functionality for consumer-grade Nvidia GPUs.
* [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q) - Guide for `vgpu_unlock`
* [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/) - Also known as `proxmox-installer.sh`

---

[[_TOC_]]
[TOC]

# Setup (Service)

@@ -33,6 +43,9 @@ Tested with Ubuntu 22.10 (EOL!) (from Proxmox templates), actually its consuming

- Make sure your timezone is set correct on you fastapi-dls server and your client

This guide does not show how to install vGPU host drivers! Look at the official documentation packed with the driver
releases.

## Docker

Docker-Images are available here for Intel (x86), AMD (amd64) and ARM (arm64):
@@ -102,10 +115,10 @@ volumes:
  dls-db:
```

## Debian/Ubuntu/macOS (manual method using `git clone` and python virtual environment)
## Debian / Ubuntu / macOS (manual method using `git clone` and python virtual environment)

Tested on `Debian 11 (bullseye)` and `macOS Ventura (13.6)`, Ubuntu may also work. **Please note that setup on macOS
differs from Debian based systems.**
Tested on `Debian 11 (bullseye)`, `Debian 12 (bookworm)` and `macOS Ventura (13.6)`, Ubuntu may also work.
**Please note that setup on macOS differs from Debian based systems.**

**Make sure you are logged in as root.**

@@ -309,7 +322,7 @@ EOF
Now you have to run `systemctl daemon-reload`. After that you can start service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.

## Debian/Ubuntu (using `dpkg`)
## Debian / Ubuntu (using `dpkg` / `apt`)

Packages are available here:

@@ -317,9 +330,11 @@ Packages are available here:

Successful tested with:

- Debian 12 (Bookworm)
- Ubuntu 22.10 (Kinetic Kudu) (EOL!)
- Ubuntu 23.04 (Lunar)
- **Debian 12 (Bookworm)** (EOL: June 06, 2026)
- *Ubuntu 22.10 (Kinetic Kudu)* (EOL: July 20, 2023)
- *Ubuntu 23.04 (Lunar Lobster)* (EOL: January 2024)
- *Ubuntu 23.10 (Mantic Minotaur)* (EOL: July 2024)
- **Ubuntu 24.04 (Noble Numbat)** (EOL: April 2036)

Not working with:

@@ -395,48 +410,37 @@ After first success you have to replace `--issue` with `--renew`.

# Configuration

| Variable | Default | Usage |
|---|---|---|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS` | `1` | Client auth-token validity (used for authenticate client against api, **not `.tok` file!**) |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA` | `<app-dir>/cert/instance.private.pem` | Site-wide private RSA key for singing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
| Variable | Default | Usage |
|---|---|---|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS` | `1` | Client auth-token validity (used for authenticate client against api, **not `.tok` file!**) |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA` | `<app-dir>/cert/instance.private.pem` | Site-wide private RSA key for singing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
| `SUPPORT_MALFORMED_JSON` | `false` | Support parsing for mal formatted "mac_address_list" ([Issue](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/issues/1)) |

\*1 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
client has 19.2 hours in which to re-establish connectivity before its license expires.

\*3 Always use `https`, since guest-drivers only support secure connections!
\*2 Always use `https`, since guest-drivers only support secure connections!

\*4 If you recreate instance keys you need to **recreate client-token for each guest**!
\*3 If you recreate your instance keys you need to **recreate client-token for each guest**!
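
The arithmetic in footnote \*1 can be reproduced with a few lines of Python. This is only an illustration of how `LEASE_EXPIRE_DAYS` and `LEASE_RENEWAL_PERIOD` interact, not code shipped with fastapi-dls:

```python
# Worked example for footnote *1 (illustrative only).
from datetime import timedelta

LEASE_EXPIRE_DAYS = 1        # lease period of one day, as in the footnote
LEASE_RENEWAL_PERIOD = 0.20  # 20 % renewal period, as in the footnote

lease = timedelta(days=LEASE_EXPIRE_DAYS)
renewal_interval = lease * LEASE_RENEWAL_PERIOD  # 4:48:00  -> renew every 4.8 hours
grace_period = lease - renewal_interval          # 19:12:00 -> 19.2 hours to re-establish connectivity

print(renewal_interval, grace_period)
```
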
# Setup (Client)

**The token file has to be copied! It's not enough to C&P file contents, because there can be special characters.**

Successfully tested with this package versions:

| vGPU Suftware | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date |
|---------------|--------------------|--------------|----------------|---------------|
| `16.1` | `535.54.06` | `535.54.03` | `536.25` | August 2023 |
| `16.0` | `535.104.06` | `535.104.05` | `537.13` | July 2023 |
| `15.3` | `525.125.03` | `525.125.06` | `529.11` | June 2023 |
| `15.2` | `525.105.14` | `525.105.17` | `528.89` | March 2023 |
| `15.1` | `525.85.07` | `525.85.05` | `528.24` | January 2023 |
| `15.0` | `525.60.12` | `525.60.13` | `527.41` | December 2022 |
| `14.4` | `510.108.03` | `510.108.03` | `514.08` | December 2022 |
| `14.3` | `510.108.03` | `510.108.03` | `513.91` | November 2022 |

- https://docs.nvidia.com/grid/index.html
This guide does not show how to install vGPU guest drivers! Look at the official documentation packed with the driver
releases.

## Linux

@@ -512,33 +516,32 @@ Done. For more information check [troubleshoot section](#troubleshoot).
8. Set schedule to `At First Array Start Only`
9. Click on Apply


# Endpoints
# API Endpoints

<details>
  <summary>show</summary>

### `GET /`
**`GET /`**

Redirect to `/-/readme`.

### `GET /-/health`
**`GET /-/health`**

Status endpoint, used for *healthcheck*.

### `GET /-/config`
**`GET /-/config`**

Shows current runtime environment variables and their values.

### `GET /-/readme`
**`GET /-/readme`**

HTML rendered README.md.

### `GET /-/manage`
**`GET /-/manage`**

Shows a very basic UI to delete origins or leases.

### `GET /-/origins?leases=false`
**`GET /-/origins?leases=false`**

List registered origins.

@@ -546,11 +549,11 @@ List registered origins.
|-----------------|---------|--------------------------------------|
| `leases` | `false` | Include referenced leases per origin |

### `DELETE /-/origins`
**`DELETE /-/origins`**

Deletes all origins and their leases.

### `GET /-/leases?origin=false`
**`GET /-/leases?origin=false`**

List current leases.

@@ -558,20 +561,20 @@ List current leases.
|-----------------|---------|-------------------------------------|
| `origin` | `false` | Include referenced origin per lease |

### `DELETE /-/lease/{lease_ref}`
**`DELETE /-/lease/{lease_ref}`**

Deletes an lease.

### `GET /-/client-token`
**`GET /-/client-token`**

Generate client token, (see [installation](#installation)).

### Others
**Others**

There are many other internal api endpoints for handling authentication and lease process.
</details>
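
For scripted setups, the `GET /-/client-token` endpoint listed above can also be fetched programmatically. The snippet below is only a sketch using `httpx` (a test dependency of this project); host, port and output file name are placeholders, not values taken from this README:

```python
# Sketch: download a client-token from a running fastapi-dls instance (illustrative only).
import httpx

DLS_URL, DLS_PORT = '192.168.1.123', 443  # placeholder address of your fastapi-dls instance

r = httpx.get(f'https://{DLS_URL}:{DLS_PORT}/-/client-token', verify=False)  # self-signed certificate
r.raise_for_status()

# the guest driver expects the downloaded token as a *.tok file (file name is illustrative)
with open('client_configuration_token.tok', 'wb') as file:
    file.write(r.content)
```
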

# Troubleshoot
# Troubleshoot / Debug

**Please make sure that fastapi-dls and your guests are on the same timezone!**

@@ -593,9 +596,9 @@ Logs are available in `C:\Users\Public\Documents\Nvidia\LoggingLog.NVDisplay.Con

## Linux

### `uvicorn.error:Invalid HTTP request received.`
### Invalid HTTP request

This message can be ignored.
This error message: `uvicorn.error:Invalid HTTP request received.` can be ignored.

- Ref. https://github.com/encode/uvicorn/issues/441

@@ -721,12 +724,47 @@ The error message can safely be ignored (since we have no license limitation :P)

</details>

# vGPU Software Compatibility Matrix

Successfully tested with this package versions.

| vGPU Suftware | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
| `17.4` | R550 | `550.127.06` | `550.127.05` | `553.24` | October 2024 | February 2025 |
| `17.3` | R550 | `550.90.05` | `550.90.07` | `552.74` | July 2024 | |
| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | |
| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
| `16.8` | R535 | `535.216.01` | `535.216.01` | `538.95` | October 2024 | July 2026 |
| `16.7` | R535 | `535.183.04` | `535.183.06` | `538.78` | July 2024 | |
| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | |
| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | December 2023 |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |

- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html

*To get the latest drivers, visit Nvidia or search in Discord-Channel `GPU Unlocking` (Server-ID: `829786927829745685`)
on channel `licensing`

# Credits

Thanks to vGPU community and all who uses this project and report bugs.

Special thanks to
Special thanks to:

- @samicrusader who created build file for ArchLinux
- @cyrus who wrote the section for openSUSE
- @midi who wrote the section for unRAID
- @samicrusader who created build file for **ArchLinux**
- @cyrus who wrote the section for **openSUSE**
- @midi who wrote the section for **unRAID**
- @polloloco who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
- @DualCoder who creates the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
- Krutav Shah who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
- Wim van 't Hoog for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)

And thanks to all people who contributed to all these libraries!
app/main.py (85 changed lines)
@@ -1,39 +1,42 @@
import logging
from base64 import b64encode as b64enc
from calendar import timegm
from contextlib import asynccontextmanager
from datetime import datetime, timedelta
from hashlib import sha256
from uuid import uuid4
from os.path import join, dirname
from json import loads as json_loads
from os import getenv as env
from os.path import join, dirname
from uuid import uuid4

from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
from json import loads as json_loads
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from calendar import timegm
from jose import jws, jwk, jwt, JWTError
from jose.constants import ALGORITHMS
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse

from util import load_key, load_file
from orm import Origin, Lease, init as db_init, migrate
from util import load_key, load_file

# Load variables
load_dotenv('../version.env')

# Get current timezone
TZ = datetime.now().astimezone().tzinfo

# Load basic variables
VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), bool(env('DEBUG', False))

config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
# Database connection
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)

# everything prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service
# Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
@@ -51,6 +54,40 @@ CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS'))
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)

# Logging
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logging.getLogger('util').setLevel(LOG_LEVEL)
logging.getLogger('NV').setLevel(LOG_LEVEL)


# FastAPI
@asynccontextmanager
async def lifespan(_: FastAPI):
    # on startup
    logger.info(f'''

    Using timezone: {str(TZ)}. Make sure this is correct and match your clients!

    Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
    If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.

    Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
    ''')

    logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')

    yield

    # on shutdown
    logger.info(f'Shutting down ...')


config = dict(openapi_url=None, docs_url=None, redoc_url=None)  # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, lifespan=lifespan, **config)

app.debug = DEBUG
app.add_middleware(
    CORSMiddleware,
@@ -59,18 +96,22 @@ app.add_middleware(
    allow_methods=['*'],
    allow_headers=['*'],
)
if bool(env('SUPPORT_MALFORMED_JSON', False)):
    from middleware import PatchMalformedJsonMiddleware

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
    logger.info(f'Enabled "PatchMalformedJsonMiddleware"!')
    app.add_middleware(PatchMalformedJsonMiddleware, enabled=True)


# Helper
def __get_token(request: Request) -> dict:
    authorization_header = request.headers.get('authorization')
    token = authorization_header.split(' ')[1]
    return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})


# Endpoints

@app.get('/', summary='Index')
async def index():
    return RedirectResponse('/-/readme')
@@ -108,7 +149,7 @@ async def _config():
@app.get('/-/readme', summary='* Readme')
async def _readme():
    from markdown import markdown
    content = load_file('../README.md').decode('utf-8')
    content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
    return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))


@@ -540,18 +581,6 @@ async def leasing_v1_lessor_shutdown(request: Request):
    return JSONr(response)


@app.on_event('startup')
async def app_on_startup():
    logger.info(f'''
    Using timezone: {str(TZ)}. Make sure this is correct and match your clients!

    Your clients renew their license every {str(Lease.calculate_renewal(LEASE_RENEWAL_PERIOD, LEASE_RENEWAL_DELTA))}.
    If the renewal fails, the license is {str(LEASE_RENEWAL_DELTA)} valid.

    Your client-token file (.tok) is valid for {str(CLIENT_TOKEN_EXPIRE_DELTA)}.
    ''')


if __name__ == '__main__':
    import uvicorn
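
The `app/main.py` changes above move the startup banner from the deprecated `@app.on_event('startup')` hook into a lifespan handler passed to `FastAPI(...)`. A minimal, stand-alone sketch of that pattern (not the fastapi-dls code itself) looks like this:

```python
# Minimal sketch of the FastAPI lifespan pattern adopted above (illustrative only).
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(_: FastAPI):
    print('startup: runs once before the first request is served')
    yield
    print('shutdown: runs once when the application stops')


app = FastAPI(lifespan=lifespan)
```
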
app/middleware.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import json
import logging
import re

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request

logger = logging.getLogger(__name__)


class PatchMalformedJsonMiddleware(BaseHTTPMiddleware):
    # see oscar.krause/fastapi-dls#1

    REGEX = '(\"mac_address_list\"\:\s?\[)([\w\d])'

    def __init__(self, app, enabled: bool):
        super().__init__(app)
        self.enabled = enabled

    async def dispatch(self, request: Request, call_next):
        body = await request.body()
        content_type = request.headers.get('Content-Type')

        if self.enabled and content_type == 'application/json':
            body = body.decode()
            try:
                json.loads(body)
            except json.decoder.JSONDecodeError:
                logger.warning(f'Malformed json received! Try to fix it, "PatchMalformedJsonMiddleware" is enabled.')
                s = PatchMalformedJsonMiddleware.fix_json(body)
                logger.debug(f'Fixed JSON: "{s}"')
                s = json.loads(s)  # ensure json is now valid
                # set new body
                request._body = json.dumps(s).encode('utf-8')

        response = await call_next(request)
        return response

    @staticmethod
    def fix_json(s: str) -> str:
        s = s.replace('\t', '')
        s = s.replace('\n', '')
        return re.sub(PatchMalformedJsonMiddleware.REGEX, r'\1"\2', s)
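
The regular expression in `PatchMalformedJsonMiddleware` re-inserts the opening quote that some clients drop after `"mac_address_list": [`. The repair can be reproduced in isolation; the payload below is a balanced variant of the sample used in `test/main.py`:

```python
# Stand-alone reproduction of the fix_json() repair (illustrative only).
import json
import re

REGEX = r'(\"mac_address_list\"\:\s?\[)([\w\d])'  # same pattern as the class attribute above

broken = '{"environment": {"fingerprint": {"mac_address_list": [ff:ff:ff:ff:ff:ff"]}}}'
fixed = re.sub(REGEX, r'\1"\2', broken)

print(fixed)       # {"environment": {"fingerprint": {"mac_address_list": ["ff:ff:ff:ff:ff:ff"]}}}
json.loads(fixed)  # parses now that the opening quote is back
```
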
app/orm.py (29 changed lines)
@@ -1,10 +1,12 @@
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from datetime import datetime, timedelta, timezone

from dateutil.relativedelta import relativedelta
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker, declarative_base

from util import NV

Base = declarative_base()


@@ -23,6 +25,8 @@ class Origin(Base):
        return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'

    def serialize(self) -> dict:
        _ = NV().find(self.guest_driver_version)

        return {
            'origin_ref': self.origin_ref,
            # 'service_instance_xid': self.service_instance_xid,
@@ -30,6 +34,7 @@ class Origin(Base):
            'guest_driver_version': self.guest_driver_version,
            'os_platform': self.os_platform,
            'os_version': self.os_version,
            '$driver': _ if _ is not None else None,
        }

    @staticmethod
@@ -61,7 +66,17 @@ class Origin(Base):
        if origin_refs is None:
            deletions = session.query(Origin).delete()
        else:
            deletions = session.query(Origin).filter(Origin.origin_ref in origin_refs).delete()
            deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
        session.commit()
        session.close()
        return deletions

    @staticmethod
    def delete_expired(engine: Engine) -> int:
        session = sessionmaker(bind=engine)()
        origins = session.query(Origin).join(Lease, Origin.origin_ref == Lease.origin_ref, isouter=True).filter(Lease.lease_ref.is_(None)).all()
        origin_refs = [origin.origin_ref for origin in origins]
        deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
        session.commit()
        session.close()
        return deletions
@@ -89,10 +104,10 @@ class Lease(Base):
            'lease_ref': self.lease_ref,
            'origin_ref': self.origin_ref,
            # 'scope_ref': self.scope_ref,
            'lease_created': self.lease_created.isoformat(),
            'lease_expires': self.lease_expires.isoformat(),
            'lease_updated': self.lease_updated.isoformat(),
            'lease_renewal': lease_renewal.isoformat(),
            'lease_created': self.lease_created.replace(tzinfo=timezone.utc).isoformat(),
            'lease_expires': self.lease_expires.replace(tzinfo=timezone.utc).isoformat(),
            'lease_updated': self.lease_updated.replace(tzinfo=timezone.utc).isoformat(),
            'lease_renewal': lease_renewal.replace(tzinfo=timezone.utc).isoformat(),
        }

    @staticmethod
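
One of the fixes above replaces `Origin.origin_ref in origin_refs` with `Origin.origin_ref.in_(origin_refs)`. Python's `in` operator has to produce a plain truth value, so it cannot emit SQL; `.in_()` builds a real `IN` clause. A small illustrative sketch with a minimal stand-in model (not the repository's own `Origin`):

```python
# What the corrected filter compiles to (illustrative, minimal model).
from sqlalchemy import CHAR, Column, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Origin(Base):
    __tablename__ = 'origin'
    origin_ref = Column(CHAR(36), primary_key=True)


stmt = select(Origin).where(Origin.origin_ref.in_(['ref-a', 'ref-b']))
print(stmt)
# roughly: SELECT origin.origin_ref FROM origin WHERE origin.origin_ref IN (...)
```
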
app/util.py (60 changed lines)
@@ -1,10 +1,17 @@
def load_file(filename) -> bytes:
import logging

logging.basicConfig()


def load_file(filename: str) -> bytes:
    log = logging.getLogger(f'{__name__}')
    log.debug(f'Loading contents of file "{filename}')
    with open(filename, 'rb') as file:
        content = file.read()
    return content


def load_key(filename) -> "RsaKey":
def load_key(filename: str) -> "RsaKey":
    try:
        # Crypto | Cryptodome on Debian
        from Crypto.PublicKey import RSA
@@ -13,6 +20,8 @@ def load_key(filename) -> "RsaKey":
        from Cryptodome.PublicKey import RSA
        from Cryptodome.PublicKey.RSA import RsaKey

    log = logging.getLogger(__name__)
    log.debug(f'Importing RSA-Key from "{filename}"')
    return RSA.import_key(extern_key=load_file(filename), passphrase=None)


@@ -24,5 +33,50 @@ def generate_key() -> "RsaKey":
    except ModuleNotFoundError:
        from Cryptodome.PublicKey import RSA
        from Cryptodome.PublicKey.RSA import RsaKey

    log = logging.getLogger(__name__)
    log.debug(f'Generating RSA-Key')
    return RSA.generate(bits=2048)


class NV:
    __DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
    __DRIVER_MATRIX: None | dict = None  # https://docs.nvidia.com/grid/ => "Driver Versions"

    def __init__(self):
        self.log = logging.getLogger(self.__class__.__name__)

        if NV.__DRIVER_MATRIX is None:
            from json import load as json_load
            try:
                file = open(NV.__DRIVER_MATRIX_FILENAME)
                NV.__DRIVER_MATRIX = json_load(file)
                file.close()
                self.log.debug(f'Successfully loaded "{NV.__DRIVER_MATRIX_FILENAME}".')
            except Exception as e:
                NV.__DRIVER_MATRIX = {}  # init empty dict to not try open file everytime, just when restarting app
                # self.log.warning(f'Failed to load "{NV.__DRIVER_MATRIX_FILENAME}": {e}')

    @staticmethod
    def find(version: str) -> dict | None:
        if NV.__DRIVER_MATRIX is None:
            return None
        for idx, (key, branch) in enumerate(NV.__DRIVER_MATRIX.items()):
            for release in branch.get('$releases'):
                linux_driver = release.get('Linux Driver')
                windows_driver = release.get('Windows Driver')
                if version == linux_driver or version == windows_driver:
                    tmp = branch.copy()
                    tmp.pop('$releases')

                    is_latest = release.get('vGPU Software') == branch.get('Latest Release in Branch')

                    return {
                        'software_branch': branch.get('vGPU Software Branch'),
                        'branch_version': release.get('vGPU Software'),
                        'driver_branch': branch.get('Driver Branch'),
                        'branch_status': branch.get('vGPU Branch Status'),
                        'release_date': release.get('Release Date'),
                        'eol': branch.get('EOL Date') if is_latest else None,
                        'is_latest': is_latest,
                    }
        return None
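
The new `NV` helper is what `Origin.serialize()` uses to annotate an origin with driver-branch information. Assuming `app/static/driver_matrix.json` has been generated (for example with `test/create_driver_matrix_json.py` further down) and the lookup is run from the `app/` directory, usage might look like this sketch:

```python
# Illustrative lookup against the scraped driver matrix (assumptions noted above).
from util import NV

info = NV().find('535.104.05')  # a Linux guest-driver version from the README matrix

if info is None:
    print('driver_matrix.json missing or version not found')
else:
    # keys come from NV.find() above; the values depend on the scraped data
    print(info['software_branch'], info['branch_version'], info['driver_branch'], info['eol'])
```
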
requirements.txt
@@ -1,8 +1,8 @@
fastapi==0.103.1
uvicorn[standard]==0.23.2
fastapi==0.115.5
uvicorn[standard]==0.32.0
python-jose==3.3.0
pycryptodome==3.19.0
pycryptodome==3.21.0
python-dateutil==2.8.2
sqlalchemy==2.0.21
markdown==3.4.4
python-dotenv==1.0.0
sqlalchemy==2.0.36
markdown==3.7
python-dotenv==1.0.1
test/create_driver_matrix_json.py (new file, 137 lines)
@@ -0,0 +1,137 @@
import logging

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

URL = 'https://docs.nvidia.com/vgpu/index.html'

BRANCH_STATUS_KEY, SOFTWARE_BRANCH_KEY, = 'vGPU Branch Status', 'vGPU Software Branch'
VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
ALT_VGPU_MANAGER_KEY = 'vGPU Manager'
RELEASE_DATE_KEY, LATEST_KEY, EOL_KEY = 'Release Date', 'Latest Release in Branch', 'EOL Date'
JSON_RELEASES_KEY = '$releases'


def __driver_versions(html: 'BeautifulSoup'):
    def __strip(_: str) -> str:
        # removes content after linebreak (e.g. "Hello\n World" to "Hello")
        _ = _.strip()
        tmp = _.split('\n')
        if len(tmp) > 0:
            return tmp[0]
        return _

    # find wrapper for "DriverVersions" and find tables
    data = html.find('div', {'id': 'driver-versions'})
    items = data.findAll('bsp-accordion', {'class': 'Accordion-items-item'})
    for item in items:
        software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
        software_branch = software_branch.replace(' Releases', '')
        matrix_key = software_branch.lower()

        # driver version info from table-heads (ths) and table-rows (trs)
        table = item.find('table')
        ths, trs = table.find_all('th'), table.find_all('tr')
        headers, releases = [header.text.strip() for header in ths], []
        for trs in trs:
            tds = trs.find_all('td')
            if len(tds) == 0:  # skip empty
                continue
            # create dict with table-heads as key and cell content as value
            x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
            releases.append(x)

        # add to matrix
        MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})


def __release_branches(html: 'BeautifulSoup'):
    # find wrapper for "AllReleaseBranches" and find table
    data = html.find('div', {'id': 'all-release-branches'})
    table = data.find('table')

    # branch releases info from table-heads (ths) and table-rows (trs)
    ths, trs = table.find_all('th'), table.find_all('tr')
    headers = [header.text.strip() for header in ths]
    for trs in trs:
        tds = trs.find_all('td')
        if len(tds) == 0:  # skip empty
            continue
        # create dict with table-heads as key and cell content as value
        x = {headers[i]: cell.text.strip() for i, cell in enumerate(tds)}

        # get matrix_key
        software_branch = x.get(SOFTWARE_BRANCH_KEY)
        matrix_key = software_branch.lower()

        # add to matrix
        MATRIX.update({matrix_key: MATRIX.get(matrix_key) | x})


def __debug():
    # print table head
    s = f'{SOFTWARE_BRANCH_KEY:^21} | {BRANCH_STATUS_KEY:^21} | {VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {EOL_KEY:>21}'
    print(s)

    # iterate over dict & format some variables to not overload table
    for idx, (key, branch) in enumerate(MATRIX.items()):
        branch_status = branch.get(BRANCH_STATUS_KEY)
        branch_status = branch_status.replace('Branch ', '')
        branch_status = branch_status.replace('Long-Term Support', 'LTS')
        branch_status = branch_status.replace('Production', 'Prod.')

        software_branch = branch.get(SOFTWARE_BRANCH_KEY).replace('NVIDIA ', '')
        for release in branch.get(JSON_RELEASES_KEY):
            version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
            linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
            linux_driver = release.get(LINUX_DRIVER_KEY)
            windows_manager = release.get(WINDOWS_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
            windows_driver = release.get(WINDOWS_DRIVER_KEY)
            release_date = release.get(RELEASE_DATE_KEY)
            is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)

            version = f'{version} *' if is_latest else version
            eol = branch.get(EOL_KEY) if is_latest else ''
            s = f'{software_branch:^21} | {branch_status:^21} | {version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {eol:>21}'
            print(s)


def __dump(filename: str):
    import json

    file = open(filename, 'w')
    json.dump(MATRIX, file)
    file.close()


if __name__ == '__main__':
    MATRIX = {}

    try:
        import httpx
        from bs4 import BeautifulSoup
    except Exception as e:
        logger.error(f'Failed to import module: {e}')
        logger.info('Run "pip install beautifulsoup4 httpx"')
        exit(1)

    r = httpx.get(URL)
    if r.status_code != 200:
        logger.error(f'Error loading "{URL}" with status code {r.status_code}.')
        exit(2)

    # parse html
    soup = BeautifulSoup(r.text, features='html.parser')

    # build matrix
    __driver_versions(soup)
    __release_branches(soup)

    # debug output
    __debug()

    # dump data to file
    __dump('../app/static/driver_matrix.json')
test/main.py (14 changed lines)
@@ -1,7 +1,8 @@
import sys
from base64 import b64encode as b64enc
from hashlib import sha256
from calendar import timegm
from datetime import datetime
from hashlib import sha256
from os.path import dirname, join
from uuid import uuid4, UUID

@@ -9,7 +10,6 @@ from dateutil.relativedelta import relativedelta
from jose import jwt, jwk
from jose.constants import ALGORITHMS
from starlette.testclient import TestClient
import sys

# add relative path to use packages as they were in the app/ dir
sys.path.append('../')
@@ -18,6 +18,7 @@ sys.path.append('../app')
from app import main
from app.util import load_key

# main.app.add_middleware(PatchMalformedJsonMiddleware, enabled=True)
client = TestClient(main.app)

ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'
@@ -106,6 +107,15 @@ def test_auth_v1_origin():
    assert response.json().get('origin_ref') == ORIGIN_REF


def test_auth_v1_origin_malformed_json():  # see oscar.krause/fastapi-dls#1
    from middleware import PatchMalformedJsonMiddleware

    # test regex (temporary, until this section is merged into main.py
    s = '{"environment": {"fingerprint": {"mac_address_list": [ff:ff:ff:ff:ff:ff"]}}'
    replaced = PatchMalformedJsonMiddleware.fix_json(s)
    assert replaced == '{"environment": {"fingerprint": {"mac_address_list": ["ff:ff:ff:ff:ff:ff"]}}'


def auth_v1_origin_update():
    payload = {
        "registration_pending": False,