366 Commits
1.3.3 ... db

Author SHA1 Message Date
Oscar Krause
5209ece1cd merge fixes 2025-04-16 14:56:26 +02:00
Oscar Krause
522b0a123e Merge branch 'main' into db
# Conflicts:
#	app/orm.py
2025-04-16 14:55:27 +02:00
Oscar Krause
9605ba3eee Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!51
2025-04-16 14:43:58 +02:00
Oscar Krause
cd5c2a6cb1 code styling 2025-04-16 14:18:04 +02:00
Oscar Krause
1d3255188e code styling 2025-04-16 14:12:40 +02:00
Oscar Krause
cea7a01b54 code styling 2025-04-16 14:12:15 +02:00
Oscar Krause
9f417b61a9 fixes 2025-04-16 14:07:18 +02:00
Oscar Krause
4f77200628 code styling 2025-04-16 14:07:18 +02:00
Oscar Krause
20b03446dc Merge branch 'main' into 'dev'
# Conflicts:
#   .gitlab-ci.yml
2025-04-16 13:57:51 +02:00
Oscar Krause
df506e8591 removed unsupported python versions 2025-04-16 13:56:36 +02:00
Oscar Krause
3fe3429986 added some python versions and added EOL 2025-04-16 12:54:24 +02:00
Oscar Krause
a996504c50 test python3.13 and 3.11 2025-04-16 12:44:58 +02:00
Oscar Krause
5d2bff88d8 fixes 2025-04-16 12:35:24 +02:00
Oscar Krause
67f2d18a95 requirements.txt updated 2025-04-16 12:25:37 +02:00
Oscar Krause
52cd34cb5c ci improvements 2025-04-16 12:24:19 +02:00
Oscar Krause
6fb03309a5 ci improvements 2025-04-16 12:23:21 +02:00
Oscar Krause
477e5abbca refactored test pipeline to test different python versions 2025-04-16 12:12:23 +02:00
Oscar Krause
e2c4e45764 Update .gitlab-ci.yml 2025-04-14 20:45:12 +02:00
Oscar Krause
9e5b4f5a42 Update .gitlab-ci.yml 2025-04-14 20:13:02 +02:00
Oscar Krause
f9c7475250 ci fixes 2025-04-14 13:37:27 +02:00
Oscar Krause
00dafdc63a do not run duplicated pipeline on feature-branches 2025-04-14 11:17:25 +02:00
Oscar Krause
a8c1cdf095 updated create_driver_matrix_json.py 2025-04-11 14:10:26 +02:00
Oscar Krause
607cca7655 typos 2025-04-10 09:00:58 +02:00
Oscar Krause
6f9b6b9e5c Merge branch 'dev' into 'main'
added NixOS Pull-Request note for official "nixpkgs"

See merge request oscar.krause/fastapi-dls!50
2025-04-10 08:58:36 +02:00
Oscar Krause
d22e93020c added NixOS Pull-Request note for official "nixpkgs" 2025-04-10 07:55:20 +02:00
Oscar Krause
a12d05281c updated default data 2025-04-08 14:30:59 +02:00
Oscar Krause
a31c80465a code styling 2025-04-08 14:05:54 +02:00
Oscar Krause
ddf5f12409 fixes 2025-04-08 14:00:15 +02:00
Oscar Krause
20cdaefa1c code refactorings after merge from main 2025-04-08 13:52:09 +02:00
Oscar Krause
f62f2a2701 Merge branch 'main' into db
# Conflicts:
#	app/main.py
#	app/orm.py
#	app/util.py
#	test/main.py
2025-04-08 10:56:25 +02:00
Oscar Krause
4522425bcc Merge branch 'dev' into 'main'
dev

See merge request oscar.krause/fastapi-dls!48
2025-04-08 10:10:31 +02:00
Oscar Krause
4568881d1e fixes 2025-04-08 09:56:51 +02:00
Oscar Krause
d4d49956fe fixes 2025-04-08 09:38:49 +02:00
Oscar Krause
44a63efc4f requirements.txt updated 2025-04-08 09:22:03 +02:00
Oscar Krause
ca0eedc1f2 .gitignore 2025-04-08 09:18:50 +02:00
Oscar Krause
6d5b389f2a .gitlab-ci.yml 2025-04-08 09:17:44 +02:00
Oscar Krause
c3dbc043b3 updated EOLs 2025-03-21 08:47:28 +01:00
Oscar Krause
666a07507e typos 2025-03-21 08:39:32 +01:00
Oscar Krause
76272af36f added some glfm 2025-03-21 07:47:41 +01:00
Oscar Krause
cc2a11d07b moved the only two FAQ entries to Known-Issues on README 2025-03-21 07:47:26 +01:00
Oscar Krause
b26704646d fixes 2025-03-20 20:24:43 +01:00
Oscar Krause
e0c9bb46ee requirements.txt updated 2025-03-20 20:09:20 +01:00
Oscar Krause
490720f2d6 code styling 2025-03-20 20:09:13 +01:00
Oscar Krause
9ed178098b removed doc directory 2025-03-20 07:11:01 +01:00
Oscar Krause
951fc35203 moved Reverse Engineering Notes to separate project
ref.: https://git.collinwebdesigns.de/nvidia/nls
2025-03-19 21:03:54 +01:00
Oscar Krause
26248a4ea5 fixed tests 2025-03-19 21:01:58 +01:00
Oscar Krause
85623d1a65 code styling 2025-03-18 14:47:33 +01:00
Oscar Krause
e6a2de40c9 fixed test deprecations 2025-03-18 10:33:52 +01:00
Oscar Krause
fd46eecfb3 created PrivateKey / PublicKey wrapper classes 2025-03-18 09:43:44 +01:00
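The wrapper classes themselves are not shown on this page; below is a hypothetical sketch of what a thin `PrivateKey` wrapper over the `cryptography` package could look like (class shape and method names are assumptions, not the project's actual code):
```
# hypothetical sketch - the real wrapper classes live in the app code
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

class PrivateKey:
    def __init__(self, key: rsa.RSAPrivateKey):
        self._key = key

    @staticmethod
    def generate(bits: int = 2048) -> 'PrivateKey':
        # generate a fresh RSA key, as openssl genrsa would
        return PrivateKey(rsa.generate_private_key(public_exponent=65537, key_size=bits))

    def pem(self) -> bytes:
        # serialize to the PEM format the rest of the app expects
        return self._key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
```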
Oscar Krause
958f23f79d fixed "cryptography" dependency 2025-03-17 14:09:55 +01:00
Oscar Krause
0bdd3a6ac2 migrated from "pycryptodome" to "cryptography" 2025-03-17 14:05:26 +01:00
Oscar Krause
8a269b0393 Merge branch 'main' into 'dev'
# Conflicts:
#   doc/Reverse Engineering Notes.md
2025-03-13 20:33:01 +01:00
Oscar Krause
6607756c08 updated Reverse Engineering Notes.md 2025-03-13 20:29:16 +01:00
Oscar Krause
584eee41ef Merge branch 'dev' into 'main'
fixed logging

See merge request oscar.krause/fastapi-dls!47
2025-03-12 13:40:45 +01:00
Oscar Krause
25658cb1fb code styling 2025-03-12 11:41:58 +01:00
Oscar Krause
43fdf1170c Edit Reverse Engineering Notes.md 2025-03-12 08:44:37 +01:00
Oscar Krause
a953e62bcb Edit Reverse Engineering Notes.md 2025-03-11 22:51:45 +01:00
Oscar Krause
9c0cd21e71 Edit Reverse Engineering Notes.md 2025-03-11 22:32:13 +01:00
Oscar Krause
3f5fcbebb3 fixed logging 2025-03-11 22:04:35 +01:00
Oscar Krause
3fdd439035 Edit Reverse Engineering Notes.md 2025-03-11 13:40:21 +01:00
Oscar Krause
d30dbced39 Edit Reverse Engineering Notes.md 2025-03-10 23:47:55 +01:00
Oscar Krause
5b61d0a40e Edit Reverse Engineering Notes.md 2025-03-10 21:21:40 +01:00
Oscar Krause
83616c858b Merge branch 'dev' into 'main'
dev

See merge request oscar.krause/fastapi-dls!44
2025-03-09 21:53:50 +01:00
Oscar Krause
ca25349a68 added notes about 18.x branch 2025-03-09 21:37:29 +01:00
Oscar Krause
262312b512 requirements.txt updated 2025-02-25 11:21:59 +01:00
Oscar Krause
aec6535391 Merge branch 'dev' into 'main'
updated driver releases

See merge request oscar.krause/fastapi-dls!43
2025-01-21 08:15:52 +01:00
Oscar Krause
0f4d0eea34 updated driver releases 2025-01-21 07:58:13 +01:00
Oscar Krause
35022d434b Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!42
2025-01-21 07:55:10 +01:00
Oscar Krause
7db43cf61e dependencies updated 2025-01-21 07:02:04 +01:00
Oscar Krause
d7598a37c6 added another further reading link 2025-01-21 07:00:13 +01:00
Oscar Krause
c57d76c74c Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!41
2024-12-16 15:29:32 +01:00
Oscar Krause
dcc3654131 disabled matrix tests for python (covered by apt jobs) 2024-12-16 07:34:55 +01:00
Oscar Krause
32f1be9599 updated DLS 2024-12-14 12:02:48 +01:00
Oscar Krause
e5fc607638 added explicit "algorithms" argument
applied patch from https://github.com/mrzenc/fastapi-dls-nixos/blob/main/add-algorithms-argument.patch from https://github.com/mrzenc/fastapi-dls-nixos
2024-12-13 07:18:14 +01:00
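A minimal sketch of what pinning the accepted JWT algorithms looks like with python-jose's `jwt.decode` (the function, key handling, and algorithm name below are illustrative, not taken from the referenced patch):
```
# hypothetical example - passing an explicit "algorithms" argument
from jose import jwt

def decode_token(token: str, public_key: str) -> dict:
    # without algorithms=..., jose would accept whatever the token header claims
    return jwt.decode(token, public_key, algorithms=['RS256'])
```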
Oscar Krause
f72c64dcb3 renamed "python3-jose" dependency to alias of "python3-josepy" to match apt packages on debian/ubuntu 2024-12-12 12:54:39 +01:00
Oscar Krause
3659aec4b2 refactored gitlab-ci 2024-12-12 12:34:19 +01:00
Oscar Krause
4a501da27b code styling 2024-12-02 10:03:35 +01:00
Oscar Krause
025b88926b fixes 2024-12-02 10:02:54 +01:00
Oscar Krause
f5943cd636 removed return statement on tests 2024-12-02 09:42:56 +01:00
Oscar Krause
03b9b4a598 moved from deprecated "datetime.utcnow()" to "datetime.now(UTC)" 2024-12-02 09:42:41 +01:00
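For reference, a minimal sketch of the deprecation fix this commit describes (Python 3.11+, where `datetime.UTC` is available):
```
from datetime import datetime, UTC

# deprecated since Python 3.12 and returns a naive datetime:
#   datetime.utcnow()
# replacement, returns a timezone-aware datetime:
now = datetime.now(UTC)
```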
Oscar Krause
ea8a66d449 removed "PatchMalformedJsonMiddleware" because it's not working on the driver side
ref. oscar.krause/fastapi-dls#1
2024-12-02 08:37:01 +01:00
Oscar Krause
991a35ef1a implemented "fix_ip_address_list_length" 2024-11-29 13:04:10 +01:00
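The project's `fix_ip_address_list_length` is not shown on this page; a hypothetical sketch of trimming an over-long list field in a request payload could look like this (field name and limit are assumptions):
```
# hypothetical sketch - the real fix_ip_address_list_length lives in the app code
def fix_ip_address_list_length(data: dict, max_len: int = 8) -> dict:
    # truncate the list so the serialized response stays within the expected length
    ip_list = data.get('ip_address_list', [])
    if len(ip_list) > max_len:
        data['ip_address_list'] = ip_list[:max_len]
    return data
```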
Oscar Krause
e20a9f4b32 added "NixOS" section from mrzenc
ref. https://github.com/mrzenc/fastapi-dls-nixos
2024-11-25 07:21:57 +01:00
Oscar Krause
1b6f142cb5 marked regex with 'r' 2024-11-22 15:07:54 +01:00
Oscar Krause
1daa365df9 code styling 2024-11-22 15:00:16 +01:00
Oscar Krause
afb38d628b typos 2024-11-22 14:19:51 +01:00
Oscar Krause
53c88a79ac improved logging and implemented method to reduce response mac_address_length 2024-11-22 14:16:10 +01:00
Oscar Krause
a43ddf79c3 updated supported os 2024-11-21 11:13:30 +01:00
Oscar Krause
699dbf6fac Merge branch '1-parsing-issue-in-mal-formatted-mac_address_list' into 'main'
Resolve "Parsing issue in mal formatted "mac_address_list""

Closes #1

See merge request oscar.krause/fastapi-dls!40
2024-11-21 09:18:22 +01:00
Oscar Krause
317699ff58 code styling 2024-11-21 08:51:39 +01:00
Oscar Krause
55446f7d9c fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
88c78efcd9 fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
fb3ac4291f code styling 2024-11-21 08:51:39 +01:00
Oscar Krause
15f14cac11 implemented "SUPPORT_MALFORMED_JSON" variable 2024-11-21 08:51:39 +01:00
Oscar Krause
018d7c34fc fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
1aee423120 fixes 2024-11-21 08:51:39 +01:00
Oscar Krause
a6b2f2a942 fixed json payload 2024-11-21 08:51:39 +01:00
Oscar Krause
e33024db86 fixed variable names
ref. oscar.krause/fastapi-dls#1
2024-11-21 08:51:39 +01:00
Oscar Krause
4ad15f0849 fix malformed json on auth
ref. oscar.krause/fastapi-dls#1
2024-11-21 08:51:39 +01:00
Oscar Krause
7bad0359af updated ci pipeline to match current eol supported systems 2024-11-21 08:44:14 +01:00
Oscar Krause
59a7c9f15a Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!38
2024-11-13 16:11:40 +01:00
Oscar Krause
bc6d692f0a added "delete_expired" method for leases 2024-11-13 15:03:37 +01:00
Oscar Krause
63c37c6334 fixed timezone in json response 2024-11-13 15:03:12 +01:00
Oscar Krause
fa2c06972e sql query improvements 2024-11-13 15:01:33 +01:00
Oscar Krause
e4e6387b2a ci improvements 2024-11-13 14:58:55 +01:00
Oscar Krause
f2be9dca8d Merge branch 'dev' into 'main'
requirements.txt updated

See merge request oscar.krause/fastapi-dls!36
2024-11-13 14:09:54 +01:00
Oscar Krause
52dd425583 fixes 2024-11-13 13:41:07 +01:00
Oscar Krause
286399d79a fixed test matrix 2024-11-13 10:48:11 +01:00
Oscar Krause
4ab1a2ed22 added requirements for ubuntu 24.10 2024-11-13 10:28:08 +01:00
Oscar Krause
459c0e21af debugging 2024-11-13 10:27:52 +01:00
Oscar Krause
98ef64211b typings 2024-11-13 09:09:00 +01:00
Oscar Krause
0b4bb65546 added python3-pip to test 2024-11-13 08:55:00 +01:00
Oscar Krause
47624f5019 Dockerfile - updated db dependencies 2024-11-13 08:37:07 +01:00
Oscar Krause
2b9d7821c0 improved gitlab test matrix 2024-11-13 08:33:28 +01:00
Oscar Krause
45f5108717 requirements.txt updated 2024-11-13 08:25:40 +01:00
Oscar Krause
a7fe8b867e Merge branch 'dev' into 'main'
added way to include driver version in api

See merge request oscar.krause/fastapi-dls!35
2024-10-24 13:28:08 +02:00
Oscar Krause
78214df9cc updated to python 3.12 2024-10-24 10:44:31 +02:00
Oscar Krause
4245d5a582 requirements.txt updated 2024-10-24 08:09:30 +02:00
Oscar Krause
9b5a387169 updated support matrix 2024-10-24 08:09:24 +02:00
Oscar Krause
9377d5ce28 requirements.txt updated 2024-10-08 14:33:44 +02:00
Oscar Krause
7489307db8 README.md updated 2024-08-09 13:15:16 +02:00
Oscar Krause
d41314e81d requirements.txt updated 2024-08-09 13:14:53 +02:00
Oscar Krause
a1123d5451 updated support matrix (removed EOL) 2024-07-24 05:35:22 +02:00
Oscar Krause
93cf719454 updated support matrix 2024-07-24 05:35:09 +02:00
Oscar Krause
0dc8f6c582 refactorings 2024-07-11 05:49:13 +02:00
Oscar Krause
4b0219b85a updated to new vgpu page
ref. https://docs.nvidia.com/vgpu/index.html
2024-07-11 05:49:00 +02:00
Oscar Krause
8edbb25c16 README updated 2024-06-27 08:49:31 +02:00
Oscar Krause
49a24f0b68 README updated 2024-06-27 08:47:35 +02:00
Oscar Krause
8af3c8e2b3 README updated 2024-06-27 08:47:04 +02:00
Oscar Krause
3c321a202c README updated 2024-06-27 08:45:25 +02:00
Oscar Krause
1b7d8bc0dc README reorganized 2024-06-27 08:37:21 +02:00
Oscar Krause
23ccea538f README reorganized 2024-06-27 08:32:15 +02:00
Oscar Krause
cd4674caad fixes 2024-06-21 19:35:42 +02:00
Oscar Krause
b0b627a3f0 Merge branch 'refs/heads/dev' into db
# Conflicts:
#	.gitlab-ci.yml
#	Dockerfile
#	README.md
#	app/main.py
#	app/orm.py
#	requirements.txt
2024-06-21 19:28:23 +02:00
Oscar Krause
c79455b84d added way to include driver version in api
use `create_driver_matrix_json.py` to generate the file `static/driver_matrix.json`. Logging is currently disabled so that users are not confused when the file is missing. This is optional!
2024-06-21 19:01:33 +02:00
Oscar Krause
35fc5ea6b0 set log format 2024-06-21 18:59:23 +02:00
Oscar Krause
6a54c05fbb Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!34
2024-06-18 13:56:30 +02:00
Oscar Krause
c9ac915055 code styling & comments 2024-06-13 20:34:45 +02:00
Oscar Krause
8c5850beda migrated from deprecated "startup" to "lifespan" hook (fastapi) 2024-06-13 20:34:27 +02:00
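A minimal sketch of the FastAPI migration this commit refers to, replacing the deprecated `@app.on_event("startup")` hook with a `lifespan` context manager (the init function is a placeholder, not the project's actual startup code):
```
from contextlib import asynccontextmanager
from fastapi import FastAPI

def init_app():
    pass  # placeholder for the app's real startup work

@asynccontextmanager
async def lifespan(app: FastAPI):
    init_app()  # runs once at startup (replaces the old "startup" hook)
    yield       # application serves requests here
    # teardown after "yield" replaces the old "shutdown" hook

app = FastAPI(lifespan=lifespan)
```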
Oscar Krause
0d9e814d0d show if debug is enabled on app startup 2024-06-13 20:18:33 +02:00
Oscar Krause
5438317fb7 rearranged imports 2024-06-13 20:18:18 +02:00
Oscar Krause
21f19be8ab code styling & improved logging 2024-06-13 20:16:51 +02:00
Oscar Krause
eff6aae25d code styling 2024-06-13 19:53:57 +02:00
Oscar Krause
6473655e57 import fixes 2024-06-13 19:24:46 +02:00
Oscar Krause
c45aa1a2a8 fixed [[__TOC__]] to [TOC] to support "markdown" package 2024-06-12 21:27:56 +02:00
Oscar Krause
1d0631417d added link to gpu support matrix 2024-06-12 19:44:37 +02:00
Oscar Krause
847d3589c5 typos 2024-06-07 08:42:41 +02:00
Oscar Krause
ca53a4e084 updated supported vGPU releases 2024-06-07 08:42:37 +02:00
Oscar Krause
006d3a1833 Merge branch 'dev' into 'main'
Added Ubuntu 24.04 support & updated requirements

See merge request oscar.krause/fastapi-dls!33
2024-05-10 10:52:00 +02:00
Oscar Krause
ad3b622c23 requirements.txt updated 2024-05-10 10:09:28 +02:00
Oscar Krause
e51d6bd391 added Ubuntu 24.04 as supported 2024-05-10 09:16:20 +02:00
Oscar Krause
78c1978dd5 added test matrix for python3.12 2024-05-10 09:15:46 +02:00
Oscar Krause
4ebb4d790e added ubuntu-24.04 "requirements-ubuntu-24.04.txt" 2024-05-10 08:22:47 +02:00
Oscar Krause
11f1456538 test image matrix 2024-05-10 07:56:13 +02:00
Oscar Krause
be6797efc7 requirements.txt updated 2024-04-23 07:43:39 +02:00
Oscar Krause
42fe066e1a Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!32
2024-04-18 07:38:31 +02:00
Oscar Krause
9eb91cbe1a link to proxmox-installer.sh and credits 2024-04-18 07:07:57 +02:00
Oscar Krause
395884f643 credits & further reading 2024-04-10 07:09:01 +02:00
Oscar Krause
ef542ec821 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!31
2024-04-09 10:28:57 +02:00
Oscar Krause
254e4ee08c requirements.txt updated 2024-04-09 08:13:33 +02:00
Oscar Krause
07273c3ebd update vGPU Version Matrix 2024-04-09 08:01:20 +02:00
Oscar Krause
e04723d128 removed biggerthanshit link 2024-04-09 08:01:01 +02:00
Oscar Krause
8f498f4960 added 17.1 as supported 2024-04-08 15:10:01 +02:00
Oscar Krause
dd69f60fd0 added link to releases & release notes 2024-03-06 20:58:51 +01:00
Oscar Krause
a5d599a52c typos 2024-03-04 21:31:56 +01:00
Oscar Krause
16f80cd78b added "16.3" support 2024-03-04 21:19:59 +01:00
Oscar Krause
07aec53787 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
3e87820f63 removed todo 2024-03-04 21:19:59 +01:00
Oscar Krause
a927e291b5 fixes 2024-03-04 21:19:59 +01:00
Oscar Krause
72054d30c4 make tests interruptible 2024-03-04 21:19:59 +01:00
Oscar Krause
00dc848083 only run test matrix when "app" or "test" changes 2024-03-04 21:19:59 +01:00
Oscar Krause
78b6fe52c7 fixed CI/CD path from "/builds" to "/tmp/builds" 2024-03-04 21:19:59 +01:00
Oscar Krause
f82d73bb01 run different jobs on "$CI_DEFAULT_BRANCH" 2024-03-04 21:19:59 +01:00
Oscar Krause
416df311b8 removed pylint 2024-03-04 21:19:59 +01:00
Oscar Krause
a6ea8241c2 disabled pylint 2024-03-04 21:19:59 +01:00
Oscar Krause
e70f70d806 disabled code_quality debug 2024-03-04 21:19:59 +01:00
Oscar Krause
77be5772c4 Update .codeclimate.yml 2024-03-04 21:19:59 +01:00
Oscar Krause
6c1b05c66a fixed test_coverage (fail on matrix) 2024-03-04 21:19:59 +01:00
Oscar Krause
a54411a957 added code_quality debug 2024-03-04 21:19:59 +01:00
Oscar Krause
90e0cb8e84 added code_quality "SOURCE_CODE" variable 2024-03-04 21:19:59 +01:00
Oscar Krause
eecb59e2e4 removed "cython" from "test" 2024-03-04 21:19:59 +01:00
Oscar Krause
4c0f65faec removed tests for "23.04"
> gcc -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -fPIC -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/include -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/src -I/usr/local/include/python3.11 -c httptools/parser/parser.c -o build/temp.linux-x86_64-cpython-311/httptools/parser/parser.o -O2
      httptools/parser/parser.c:212:12: fatal error: longintrepr.h: No such file or directory
2024-03-04 21:19:59 +01:00
Oscar Krause
e41084f5c5 added tests for Ubuntu "Mantic Minotaur" 2024-03-04 21:19:59 +01:00
Oscar Krause
36d5b83fb8 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
11138c2191 updated debian bookworm 12 dependencies 2024-03-04 21:19:59 +01:00
Oscar Krause
ff9e85e32b updated test to debian bookworm 2024-03-04 21:19:59 +01:00
Oscar Krause
cb6a089678 fixed testing dependency 2024-03-04 21:19:59 +01:00
Oscar Krause
085186f82a added gcc as dependency 2024-03-04 21:19:59 +01:00
Oscar Krause
f77d3feee1 fixes 2024-03-04 21:19:59 +01:00
Oscar Krause
f2721c7663 fixed debian package versions 2024-03-04 21:19:59 +01:00
Oscar Krause
40cb5518cb fixed versions & added 16.2 as supported 2024-03-04 21:19:59 +01:00
Oscar Krause
021c0ac38d added os specific requirements.txt 2024-03-04 21:19:59 +01:00
Oscar Krause
9c22628b4e implemented python test matrix for different python dependencies on different os releases 2024-03-04 21:19:59 +01:00
Oscar Krause
966b421dad README.md updated 2024-03-04 21:19:59 +01:00
Oscar Krause
7f8752a93d updated ubuntu from 22.10 (EOL) to 23.04 2024-03-04 21:19:59 +01:00
Oscar Krause
30979fd18e requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
72965cc879 added 16.1 as supported nvidia driver release 2024-03-04 21:19:59 +01:00
Oscar Krause
1887cbc534 added macOS as supported host (using python-venv) 2024-03-04 21:19:59 +01:00
Oscar Krause
2e942f4553 added Docker supported system architectures 2024-03-04 21:19:59 +01:00
Oscar Krause
3dda920a52 added link to driver compatibility section 2024-03-04 21:19:59 +01:00
Oscar Krause
765a994d83 requirements.txt updated 2024-03-04 21:19:59 +01:00
Oscar Krause
23488f94d4 added support for 16.0 drivers to readme 2024-03-04 21:19:59 +01:00
Oscar Krause
f9341cdab4 fixed docker image name (gitlab registry) 2024-03-04 21:19:59 +01:00
Oscar Krause
cad81ad1d6 fixed deploy docker 2024-03-04 21:19:59 +01:00
Oscar Krause
b07b7da2f3 fixed new docker registry image path 2024-03-04 21:19:59 +01:00
Oscar Krause
1ef7dd82f6 toggle api endpoints 2024-03-04 21:19:25 +01:00
Oscar Krause
5a1b1a5950 typos 2024-03-04 21:19:25 +01:00
Oscar Krause
83f4b42f01 added note that IPv6 may need to be disabled 2024-03-04 21:19:25 +01:00
Oscar Krause
a3baaab26f removed mysql from included docker drivers 2024-03-04 21:19:25 +01:00
Oscar Krause
aa4ebfce73 added docker command to logging section
thanks to @libreshare (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/2)
2024-03-04 21:19:25 +01:00
Oscar Krause
aa746feb13 improvements
thanks to @AbsolutelyFree (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/1)
2024-03-04 21:19:25 +01:00
Oscar Krause
fce0eb6d74 fixed "deploy:pacman" 2024-03-04 21:19:25 +01:00
Oscar Krause
32806e5cca push multiarch image to docker-hub 2024-03-04 21:19:25 +01:00
Oscar Krause
50eddeecfc fixed mariadb-client installation
ref. https://github.com/PyMySQL/mysqlclient/discussions/624
2024-03-04 21:19:25 +01:00
Oscar Krause
092e6186ab added missing "pkg-config" for "mysqlclient==2.2.0"
ref. https://stackoverflow.com/questions/76533384/docker-alpine-build-fails-on-mysqlclient-installation-with-error-exception-can
2024-03-04 21:19:25 +01:00
Oscar Krause
acbe889fd9 fixed versions 2024-03-04 21:19:25 +01:00
Oscar Krause
05cad95c2a refactored docker-compose.yml to a very simple example, and moved proxy to "examples" directory 2024-03-04 21:19:25 +01:00
Oscar Krause
c9e36759e3 added 15.3 to supported drivers list 2024-03-04 21:19:25 +01:00
Oscar Krause
d116eec626 updated compatibility list 2024-03-04 21:19:25 +01:00
Oscar Krause
b1620154db docker-compose.yml - added note for TZ 2024-03-04 21:19:25 +01:00
Oscar Krause
4181095791 requirements.txt updated 2024-03-04 21:19:25 +01:00
Oscar Krause
66d203e72a requirements.txt updated 2024-03-04 21:13:12 +01:00
Oscar Krause
7800bf73a8 added "16.4" and "17.0" as supported 2024-02-27 15:44:34 +01:00
Oscar Krause
5b39598487 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!30
2024-02-27 08:20:43 +01:00
Oscar Krause
ed59260a10 added "16.3" support 2024-02-26 20:53:47 +01:00
Oscar Krause
7c70d121be requirements.txt updated 2024-02-26 20:53:33 +01:00
Oscar Krause
213e768708 removed todo 2024-01-18 17:02:09 +01:00
Oscar Krause
0696900d67 fixes 2024-01-18 16:58:33 +01:00
Oscar Krause
4fb90a22e3 make tests interruptible 2024-01-18 13:10:12 +01:00
Oscar Krause
6aa197dcae only run test matrix when "app" or "test" changes 2024-01-18 13:09:30 +01:00
Oscar Krause
46f6c9fe99 fixed CI/CD path from "/builds" to "/tmp/builds" 2024-01-18 13:06:45 +01:00
Oscar Krause
2baaeb561b run different jobs on "$CI_DEFAULT_BRANCH" 2024-01-18 12:59:06 +01:00
Oscar Krause
867cd7018a removed pylint 2024-01-18 12:58:43 +01:00
Oscar Krause
9c686913dd disabled pylint 2024-01-18 12:46:51 +01:00
Oscar Krause
d3c4dc3fb7 disabled code_quality debug 2024-01-18 08:34:43 +01:00
Oscar Krause
af8b1c2387 Update .codeclimate.yml 2024-01-17 23:13:20 +01:00
Oscar Krause
d37d96dc34 fixed test_coverage (fail on matrix) 2024-01-17 23:05:57 +01:00
Oscar Krause
21d052523f added code_quality debug 2024-01-17 22:43:47 +01:00
Oscar Krause
22110df791 added code_quality "SOURCE_CODE" variable 2024-01-17 22:37:33 +01:00
Oscar Krause
c7f354d50c removed "cython" from "test" 2024-01-17 11:33:22 +01:00
Oscar Krause
3bdfc94527 removed tests for "23.04"
> gcc -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -fPIC -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/include -I/tmp/pip-install-sazb8fvo/httptools_694f06fa2e354ed9ba9f5c167df7fce4/vendor/llhttp/src -I/usr/local/include/python3.11 -c httptools/parser/parser.c -o build/temp.linux-x86_64-cpython-311/httptools/parser/parser.o -O2
      httptools/parser/parser.c:212:12: fatal error: longintrepr.h: No such file or directory
2024-01-17 09:33:58 +01:00
Oscar Krause
9473f10653 added tests for Ubuntu "Mantic Minotaur" 2024-01-17 08:08:37 +01:00
Oscar Krause
e9ad1d7791 requirements.txt updated 2024-01-12 14:53:17 +01:00
Oscar Krause
f97ee9c8fc updated debian bookworm 12 dependencies 2024-01-12 14:25:03 +01:00
Oscar Krause
236948e483 updated test to debian bookworm 2023-11-03 14:03:48 +01:00
Oscar Krause
948934ad0e fixed testing dependency 2023-11-03 12:53:50 +01:00
Oscar Krause
3ef14e5522 added gcc as dependency 2023-11-03 11:41:44 +01:00
Oscar Krause
ee50ede2ea fixes 2023-11-03 10:49:06 +01:00
Oscar Krause
b11579de98 fixed debian package versions 2023-11-03 09:28:23 +01:00
Oscar Krause
dc33c29158 fixed versions & added 16.2 as supported 2023-11-03 08:23:07 +01:00
Oscar Krause
6f9107087b added os specific requirements.txt 2023-10-25 07:36:17 +02:00
Oscar Krause
01fd954252 implemented python test matrix for different python dependencies on different os releases 2023-10-25 07:31:29 +02:00
Oscar Krause
995dbdac80 README.md updated 2023-10-25 07:30:57 +02:00
Oscar Krause
65de4d0534 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!29
2023-10-16 10:27:49 +02:00
Oscar Krause
51b28dcdc3 updated ubuntu from 22.10 (EOL) to 23.04 2023-10-16 09:50:24 +02:00
Oscar Krause
9512e29ed9 requirements.txt updated 2023-09-26 07:09:06 +02:00
Oscar Krause
713e33eed1 added 16.1 as supported nvidia driver release 2023-09-26 07:08:58 +02:00
Oscar Krause
4b16b02a7d added macOS as supported host (using python-venv) 2023-09-26 07:08:41 +02:00
Oscar Krause
3e9d7c0061 added Docker supported system architectures 2023-09-26 07:08:12 +02:00
Oscar Krause
7480cb4cf7 added link to driver compatibility section 2023-07-13 06:46:27 +02:00
Oscar Krause
58ffa752f3 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!28
2023-07-10 19:11:28 +02:00
Oscar Krause
2d7909546d requirements.txt updated 2023-07-10 18:47:46 +02:00
Oscar Krause
fec099ae81 added support for 16.0 drivers to readme 2023-07-10 13:32:32 +02:00
Oscar Krause
fd4fa84dc5 fixed docker image name (gitlab registry) 2023-07-04 19:39:06 +02:00
Oscar Krause
5ff3295658 fixed deploy docker 2023-07-04 18:58:13 +02:00
Oscar Krause
ca38ebe3fd Merge branch 'dev' into 'main'
Multiarch to DockerHub

See merge request oscar.krause/fastapi-dls!27
2023-07-04 18:47:45 +02:00
Oscar Krause
df5cb3c9c3 Merge branch 'main' into 'dev'
# Conflicts:
#   .gitlab-ci.yml
2023-07-04 16:19:49 +00:00
Oscar Krause
eca64fb1d5 push multiarch image to docker-hub 2023-07-04 17:47:10 +02:00
Oscar Krause
7ae1201c8f fixed new docker registry image path 2023-07-04 13:43:15 +02:00
Oscar Krause
a4e98dae46 fixed docker image path 2023-07-04 13:42:21 +02:00
Oscar Krause
d4267f3ee6 toggle api endpoints 2023-07-04 12:42:31 +02:00
Oscar Krause
c02ca762ea typos 2023-07-04 12:42:19 +02:00
Oscar Krause
10caf2310c added note that IPv6 may need to be disabled 2023-07-04 12:39:13 +02:00
Oscar Krause
7380e4328e removed mysql from included docker drivers 2023-07-04 12:38:54 +02:00
Oscar Krause
c1eaa33d9e added docker command to logging section
thanks to @libreshare (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/2)
2023-07-04 12:22:22 +02:00
Oscar Krause
45545953ed improvements
thanks to @AbsolutelyFree (https://gitea.publichub.eu/oscar.krause/fastapi-dls/issues/1)
2023-07-04 12:19:07 +02:00
Oscar Krause
4c8c2ed3d6 fixed "deploy:pacman" 2023-07-04 11:55:26 +02:00
Oscar Krause
6483af4ba9 Merge branch 'dev' into 'main'
Dev

See merge request oscar.krause/fastapi-dls!26
2023-07-04 11:39:37 +02:00
Oscar Krause
e6595c05d5 fixed mariadb-client installation
ref. https://github.com/PyMySQL/mysqlclient/discussions/624
2023-07-04 11:06:00 +02:00
Oscar Krause
fb1dbea1ee added missing "pkg-config" for "mysqlclient==2.2.0"
ref. https://stackoverflow.com/questions/76533384/docker-alpine-build-fails-on-mysqlclient-installation-with-error-exception-can
2023-07-04 10:50:35 +02:00
Oscar Krause
f576ded038 fixed versions 2023-07-04 10:33:30 +02:00
Oscar Krause
54eaf55ee8 refactored docker-compose.yml to a very simple example, and moved proxy to "examples" directory 2023-07-04 10:24:11 +02:00
Oscar Krause
3119d2c7ea added 15.3 to supported drivers list 2023-07-04 10:18:07 +02:00
Oscar Krause
e40f4ce41f updated compatibility list 2023-07-04 10:17:45 +02:00
Oscar Krause
576f22333e docker-compose.yml - added note for TZ 2023-07-04 10:17:34 +02:00
Oscar Krause
0f53436700 requirements.txt updated 2023-07-04 10:17:12 +02:00
Oscar Krause
248c70a862 Merge branch 'dev' into db 2023-06-12 15:19:28 +02:00
Oscar Krause
39a2408d8d migrated api to database-config (Site, Instance) 2023-06-12 15:19:06 +02:00
Oscar Krause
18807401e4 orm improvements & fixes 2023-06-12 15:14:12 +02:00
Oscar Krause
5e47ad7729 fastapi openapi url 2023-06-12 15:13:53 +02:00
Oscar Krause
20448bc587 improved relationships 2023-06-12 15:13:29 +02:00
Oscar Krause
5e945bc43a code styling 2023-06-12 14:47:32 +02:00
Oscar Krause
b4150fa527 implemented Site and Instance orm models including initialization 2023-06-12 12:42:13 +02:00
Oscar Krause
38e1a1725c code styling 2023-06-12 12:40:10 +02:00
Oscar Krause
c79636b1c2 fixed .gitlab-ci.yml deprecated build-ref variables
ref. https://gitlab.com/gitlab-org/gitlab/-/issues/352957
2023-06-12 11:03:03 +02:00
Oscar Krause
8de9a89e56 Merge branch 'main' into 'dev'
# Conflicts:
#   requirements.txt
2023-06-12 08:56:01 +00:00
Oscar Krause
801d1786ef requirements.txt updated 2023-06-12 10:52:13 +02:00
Oscar Krause
7e5f8b6c8a implemented endpoint to remove expired leases 2023-06-12 10:48:00 +02:00
Oscar Krause
98da86fc2e removed debian bookworm testing notes 2023-06-12 09:31:43 +02:00
Oscar Krause
14cf6a953f typos 2023-05-09 06:58:51 +02:00
Oscar Krause
6a5d3cb2f7 requirements.txt updated 2023-05-09 06:57:17 +02:00
Oscar Krause
774a1c21a1 improved docker build with "ARG" instead of using "version.env" which is not present on local builds (because it's created by ci-pipeline) 2023-05-09 06:57:03 +02:00
Oscar Krause
d1a77df0e1 updated sudo / su commands (list sudo first instead of su) 2023-04-17 10:58:25 +02:00
Oscar Krause
c9c73f6cf2 updated docker image requirements.txt 2023-04-17 10:35:23 +02:00
Oscar Krause
b216dcb3dd fixed nvidia-smi path on windows 2023-04-17 10:35:08 +02:00
Oscar Krause
d2e4042932 added 15.2 to supported versions 2023-04-01 23:20:06 +02:00
Oscar Krause
04a1ee0948 added Roadmap 2023-03-24 14:28:44 +01:00
Oscar Krause
c1b5f83f44 Merge branch 'multiarch' into 'main'
multiarch support

See merge request oscar.krause/fastapi-dls!25
2023-03-24 10:29:23 +01:00
Oscar Krause
9d1422cbdf secret detection 2023-03-24 10:00:25 +01:00
Oscar Krause
7b7f14bd82 Update .gitlab-ci.yml 2023-03-24 09:31:27 +01:00
Oscar Krause
f72c0f7db3 Update .gitlab-ci.yml 2023-03-24 09:11:26 +01:00
Oscar Krause
76d8753f28 Update .gitlab-ci.yml 2023-03-24 09:07:49 +01:00
Oscar Krause
593db0e789 Update .gitlab-ci.yml 2023-03-24 08:43:29 +01:00
Oscar Krause
3d9e3cb88f set specific arm64 version to v8 2023-03-24 07:48:35 +01:00
Oscar Krause
995b944135 removed "linux/arm/v7" 2023-03-23 11:46:17 +01:00
Oscar Krause
e200c84345 improvements 2023-03-23 11:10:53 +01:00
Oscar Krause
04ff36c94d Update .gitlab-ci.yml 2023-03-23 08:36:34 +01:00
Oscar Krause
89704bc2a1 Update .gitlab-ci.yml 2023-03-23 08:35:27 +01:00
Oscar Krause
6395214fa0 Update .gitlab-ci.yml 2023-03-23 08:24:40 +01:00
Oscar Krause
c8e000eb3e Update .gitlab-ci.yml 2023-03-23 08:22:51 +01:00
Oscar Krause
c8e5676c01 Update .gitlab-ci.yml 2023-03-23 08:17:31 +01:00
Oscar Krause
6f11bc414c Update .gitlab-ci.yml 2023-03-23 08:11:57 +01:00
Oscar Krause
1fc5ac8378 added setup_vgpu_license.sh script 2023-03-22 07:10:18 +01:00
Oscar Krause
87334fbfad added unraid section 2023-03-20 20:21:17 +01:00
Oscar Krause
0fac033657 Merge branch 'dev' into 'main'
dev

See merge request oscar.krause/fastapi-dls!24
2023-03-20 15:01:22 +01:00
Oscar Krause
7cd4e6fde0 fixes 2023-03-20 14:51:54 +01:00
Oscar Krause
a22b56edbe fixes 2023-03-20 14:33:50 +01:00
Oscar Krause
e42dc6aa86 code styling 2023-03-20 10:06:21 +01:00
Oscar Krause
86f703a36c ci improvements 2023-03-20 08:35:06 +01:00
Oscar Krause
71795cc7a2 ci improvements 2023-03-20 08:07:24 +01:00
Oscar Krause
4ef041bb54 styling 2023-02-28 13:08:34 +01:00
Oscar Krause
88c8fb98da added some notes about included database drivers in docker image 2023-02-28 07:52:14 +01:00
Oscar Krause
a7b4a4b631 requirements.txt updated 2023-02-14 16:00:04 +01:00
Oscar Krause
7ccb254cbf dependency scanning 2023-02-14 15:48:49 +01:00
Oscar Krause
1d5d3b31fb dependency scanning 2023-02-14 15:32:32 +01:00
Oscar Krause
7af2e02627 improvements 2023-02-14 15:14:19 +01:00
Oscar Krause
938fc6bd60 added SAST 2023-02-14 14:50:21 +01:00
Oscar Krause
1b9ebb48b1 added secret detection 2023-02-14 13:55:49 +01:00
Oscar Krause
4972f00822 fixes 2023-02-14 13:37:25 +01:00
Oscar Krause
210a36c07f added code-quality and test-coverage 2023-02-14 12:59:31 +01:00
Oscar Krause
e1bbd42b50 Merge branch 'dev' into 'main'
1.3.5

See merge request oscar.krause/fastapi-dls!23
2023-02-13 17:30:11 +01:00
Oscar Krause
c1d541f7c6 bump version to 1.3.5 2023-02-13 08:09:37 +01:00
Oscar Krause
4b58fe6e20 added openSUSE Leap 15.4 support 2023-02-13 08:09:21 +01:00
Oscar Krause
b36b49df11 fixed missing mkdir for config file on manual installation method 2023-02-01 07:55:12 +01:00
Oscar Krause
a42b1c8cfb added note to be logged in as root using manual install method (git) 2023-01-30 12:34:46 +01:00
Oscar Krause
59152f95e6 fixed - The `declarative_base()` function is now available as sqlalchemy.orm.declarative_base() 2023-01-30 10:23:09 +01:00
Oscar Krause
62d347510d fixed - sqlalchemy.exc.ArgumentError: Textual SQL expression '\nCREATE TABLE origin (\n\to...' should be explicitly declared as text('\nCREATE TABLE origin (\n\to...') 2023-01-30 10:22:18 +01:00
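Both fixes follow SQLAlchemy's 1.4/2.0 migration path; a minimal sketch (the table and connection string here are illustrative):
```
from sqlalchemy import create_engine, text
from sqlalchemy.orm import declarative_base  # was: sqlalchemy.ext.declarative

Base = declarative_base()

engine = create_engine('sqlite:///db.sqlite')
with engine.connect() as conn:
    # raw SQL strings must now be wrapped in text()
    conn.execute(text('CREATE TABLE IF NOT EXISTS origin (id TEXT PRIMARY KEY)'))
    conn.commit()
```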
Oscar Krause
f540c4b25b requirements.txt updated 2023-01-30 09:19:03 +01:00
Oscar Krause
70212e0edd improved docker-compose examples 2023-01-30 09:18:57 +01:00
Oscar Krause
616e8fba5e README - improvements 2023-01-30 08:37:34 +01:00
Oscar Krause
b905ab9dd9 Merge branch 'dev' into 'main'
1.3.4

See merge request oscar.krause/fastapi-dls!22
2023-01-26 07:56:34 +01:00
Oscar Krause
9edc93653e bump version to 1.3.4 2023-01-26 07:38:48 +01:00
Oscar Krause
f30e9237a5 requirements.txt updated 2023-01-26 07:18:01 +01:00
Oscar Krause
f12dc28c42 fixed overriding config file on update / reinstall 2023-01-26 07:17:16 +01:00
Oscar Krause
02276d5440 disabled openapi endpoints 2023-01-23 07:33:54 +01:00
Oscar Krause
9ebff8d6ca typos 2023-01-23 07:29:13 +01:00
Oscar Krause
48eb6d6c64 typos 2023-01-23 07:23:42 +01:00
Oscar Krause
f7ef8d76b6 fixed Origin.delete() 2023-01-23 07:12:02 +01:00
Oscar Krause
bed24b56ce styling 2023-01-19 08:26:35 +01:00
Oscar Krause
95427d430e added startup script 2023-01-19 07:26:22 +01:00
Oscar Krause
c3ea0aa48c added variable for client-token-expire-delta 2023-01-19 07:26:07 +01:00
Oscar Krause
91be7b226c added some comments for default values 2023-01-19 07:25:44 +01:00
Oscar Krause
7045692958 added official links 2023-01-19 07:25:24 +01:00
Oscar Krause
38177fa259 styling 2023-01-18 14:29:48 +01:00
Oscar Krause
9411759f6d added system requirements and preparations 2023-01-18 14:23:34 +01:00
Oscar Krause
48c37987b2 fixed logging and added current timezone info 2023-01-18 14:23:25 +01:00
25 changed files with 1690 additions and 627 deletions


@@ -2,7 +2,7 @@ Package: fastapi-dls
Version: 0.0
Architecture: all
Maintainer: Oscar Krause oscar.krause@collinwebdesigns.de
Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-jose, python3-sqlalchemy, python3-pycryptodome, python3-markdown, uvicorn, openssl
Depends: python3, python3-fastapi, python3-uvicorn, python3-dotenv, python3-dateutil, python3-josepy, python3-sqlalchemy, python3-cryptography, python3-markdown, uvicorn, openssl
Recommends: curl
Installed-Size: 10240
Homepage: https://git.collinwebdesigns.de/oscar.krause/fastapi-dls


@@ -0,0 +1,11 @@
# https://packages.debian.org/hu/
fastapi==0.92.0
uvicorn[standard]==0.17.6
python-jose[cryptography]==3.3.0
cryptography==38.0.4
python-dateutil==2.8.2
sqlalchemy==1.4.46
markdown==3.4.1
python-dotenv==0.21.0
jinja2==3.1.2
httpx==0.23.3


@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.101.0
uvicorn[standard]==0.27.1
python-jose[cryptography]==3.3.0
cryptography==41.0.7
python-dateutil==2.8.2
sqlalchemy==1.4.50
markdown==3.5.2
python-dotenv==1.0.1
jinja2==3.1.2


@@ -0,0 +1,10 @@
# https://packages.ubuntu.com
fastapi==0.110.3
uvicorn[standard]==0.30.3
python-jose[cryptography]==3.3.0
cryptography==42.0.5
python-dateutil==2.9.0
sqlalchemy==2.0.32
markdown==3.6
python-dotenv==1.0.1
jinja2==3.1.3


@@ -8,10 +8,11 @@ pkgdesc='NVIDIA DLS server implementation with FastAPI'
arch=('any')
url='https://git.collinwebdesigns.de/oscar.krause/fastapi-dls'
license=('MIT')
depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-pycryptodome' 'uvicorn' 'python-markdown' 'openssl')
depends=('python' 'python-jose' 'python-starlette' 'python-httpx' 'python-fastapi' 'python-dotenv' 'python-dateutil' 'python-sqlalchemy' 'python-cryptography' 'uvicorn' 'python-markdown' 'openssl')
provider=("$pkgname")
install="$pkgname.install"
source=('git+file:///builds/oscar.krause/fastapi-dls' # https://gitea.publichub.eu/oscar.krause/fastapi-dls.git
backup=('etc/default/fastapi-dls')
source=("git+file://${CI_PROJECT_DIR}"
"$pkgname.default"
"$pkgname.service"
"$pkgname.tmpfiles")
@@ -21,8 +22,9 @@ sha256sums=('SKIP'
'3dc60140c08122a8ec0e7fa7f0937eb8c1288058890ba09478420fc30ce9e30c')
pkgver() {
echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > $srcdir/$pkgname/version.env
source $srcdir/$pkgname/version.env
echo ${VERSION}
echo $VERSION
}
check() {
@@ -37,7 +39,7 @@ check() {
package() {
install -d "$pkgdir/usr/share/doc/$pkgname"
install -d "$pkgdir/var/lib/$pkgname/cert"
cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
#cp -r "$srcdir/$pkgname/doc"/* "$pkgdir/usr/share/doc/$pkgname/"
install -Dm644 "$srcdir/$pkgname/README.md" "$pkgdir/usr/share/doc/$pkgname/README.md"
install -Dm644 "$srcdir/$pkgname/version.env" "$pkgdir/usr/share/doc/$pkgname/version.env"

.UNRAID/FastAPI-DLS.xml

@@ -0,0 +1,48 @@
<?xml version="1.0"?>
<Container version="2">
<Name>FastAPI-DLS</Name>
<Repository>collinwebdesigns/fastapi-dls:latest</Repository>
<Registry>https://hub.docker.com/r/collinwebdesigns/fastapi-dls</Registry>
<Network>br0</Network>
<MyIP></MyIP>
<Shell>sh</Shell>
<Privileged>false</Privileged>
<Support/>
<Project/>
<Overview>Source:&#xD;
https://git.collinwebdesigns.de/oscar.krause/fastapi-dls#docker&#xD;
&#xD;
Make sure you create these certificates before starting the container for the first time:&#xD;
```&#xD;
# Check https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/main/#docker for more information:&#xD;
WORKING_DIR=/mnt/user/appdata/fastapi-dls/cert&#xD;
mkdir -p $WORKING_DIR&#xD;
cd $WORKING_DIR&#xD;
# create instance private and public key for signing JWTs&#xD;
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048 &#xD;
openssl rsa -in $WORKING_DIR/instance.private.pem -outform PEM -pubout -out $WORKING_DIR/instance.public.pem&#xD;
# create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl&#xD;
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout $WORKING_DIR/webserver.key -out $WORKING_DIR/webserver.crt&#xD;
```&#xD;
</Overview>
<Category/>
<WebUI>https://[IP]:[PORT:443]</WebUI>
<TemplateURL/>
<Icon>https://git.collinwebdesigns.de/uploads/-/system/project/avatar/106/png-transparent-nvidia-grid-logo-business-nvidia-electronics-text-trademark.png?width=64</Icon>
<ExtraParams>--restart always</ExtraParams>
<PostArgs/>
<CPUset/>
<DateInstalled>1679161568</DateInstalled>
<DonateText/>
<DonateLink/>
<Requires/>
<Config Name="HTTPS Port" Target="" Default="443" Mode="tcp" Description="Same as DLS Port below." Type="Port" Display="always-hide" Required="true" Mask="false">443</Config>
<Config Name="App Cert" Target="/app/cert" Default="/mnt/user/appdata/fastapi-dls/cert" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. &#13;&#10;&#13;&#10;You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/cert</Config>
<Config Name="DLS Port" Target="DSL_PORT" Default="443" Mode="" Description="Choose port you want to use. Make sure to change the HTTPS port above to match it." Type="Variable" Display="always-hide" Required="true" Mask="false">443</Config>
<Config Name="App database" Target="/app/database" Default="/mnt/user/appdata/fastapi-dls/data" Mode="rw" Description="[REQUIRED] Read the description above to make this folder. &#13;&#10;&#13;&#10;You do not need to change the path." Type="Path" Display="always-hide" Required="true" Mask="false">/mnt/user/appdata/fastapi-dls/data</Config>
<Config Name="DSL IP" Target="DLS_URL" Default="localhost" Mode="" Description="Put your container's IP (or your host's IP if it's shared)." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
<Config Name="Time Zone" Target="TZ" Default="" Mode="" Description="Format example: America/New_York. MUST MATCH YOUR CURRENT TIMEZONE AND THE GUEST VMS TIMEZONE! Otherwise you'll get into issues, read the guide above." Type="Variable" Display="always-hide" Required="true" Mask="false"></Config>
<Config Name="Database" Target="DATABASE" Default="sqlite:////app/database/db.sqlite" Mode="" Description="Set to sqlite:////app/database/db.sqlite" Type="Variable" Display="advanced-hide" Required="true" Mask="false">sqlite:////app/database/db.sqlite</Config>
<Config Name="Debug" Target="DEBUG" Default="true" Mode="" Description="true to enable debugging, false to disable them." Type="Variable" Display="advanced-hide" Required="false" Mask="false">true</Config>
<Config Name="Lease" Target="LEASE_EXPIRE_DAYS" Default="90" Mode="" Description="90 days is the maximum value." Type="Variable" Display="advanced" Required="false" Mask="false">90</Config>
</Container>


@@ -0,0 +1,197 @@
#!/bin/bash
# This script automates the licensing of the vGPU guest driver
# on Unraid boot. Set the Schedule to: "At Startup of Array".
#
# Relies on FastAPI-DLS for the licensing.
# It assumes FeatureType=1 (vGPU), change it as you see fit in line <114>
#
# Requires `elfutils` to be installed in the system for `nvidia-gridd` to run
# To Install it:
# 1) You might find it here: https://packages.slackware.com/ (choose the 64bit version of Slackware)
# 2) Download the package and put it in /boot/extra to be installed on boot
# 3) a. Reboot to install it, OR
# b. Run `upgradepkg --install-new /boot/extra/elfutils*`
# [i]: Make sure to have only one version of elfutils, otherwise you might run into issues
# Sources and docs:
# https://docs.nvidia.com/grid/15.0/grid-vgpu-user-guide/index.html#configuring-nls-licensed-client-on-linux
#
################################################
# MAKE SURE YOU CHANGE THESE VARIABLES #
################################################
###### CHANGE ME!
# IP and PORT of FastAPI-DLS
DLS_IP=192.168.0.123
DLS_PORT=443
# Token folder, must be on a filesystem that supports
# linux filesystem permissions (eg: ext4,xfs,btrfs...)
TOKEN_PATH=/mnt/user/system/nvidia
PING=$(which ping)
# Check if the License is applied
if [[ "$(nvidia-smi -q | grep "Expiry")" == *Expiry* ]]; then
echo " [i] Your vGPU Guest drivers are already licensed."
echo " [i] $(nvidia-smi -q | grep "Expiry")"
echo " [<] Exiting..."
exit 0
fi
# Check if the FastAPI-DLS server is reachable
MAX_RETRIES=30
for i in $(seq 1 $MAX_RETRIES); do
echo -ne "\r [>] Attempt $i to connect to $DLS_IP."
if ping -c 1 $DLS_IP >/dev/null 2>&1; then
echo -e "\n [*] Connection successful."
break
fi
if [ $i -eq $MAX_RETRIES ]; then
echo -e "\n [!] Connection failed after $MAX_RETRIES attempts."
echo -e "\n [<] Exiting..."
exit 1
fi
sleep 1
done
# Check if the token folder exists
if [ -d "${TOKEN_PATH}" ]; then
echo " [*] Token Folder exists. Proceeding..."
else
echo " [!] Token Folder does not exists or not ready yet. Exiting."
echo " [!] Token Folder Specified: ${TOKEN_PATH}"
exit 1
fi
# Check if elfutils is installed, otherwise the nvidia-gridd service
# won't start
if [ "$(grep -R "elfutils" /var/log/packages/* | wc -l)" != 0 ]; then
echo " [*] Elfutils is installed, proceeding..."
else
echo " [!] Elfutils is not installed, downloading and installing..."
echo " [!] Downloading elfutils to /boot/extra"
echo " [i] This script will download elfutils from slackware64-15.0 repository."
echo " [i] If you have a different version of Unraid (6.11.5), you might want to"
echo " [i] download and install a suitable version manually from the slackware"
echo " [i] repository, and put it in /boot/extra to be install on boot."
echo " [i] You may also install it by running: "
echo " [i] upgradepkg --install-new /path/to/elfutils-*.txz"
echo ""
echo " [>] Downloading elfutils from slackware64-15.0 repository:"
wget -q -nc --show-progress --progress=bar:force:noscroll -P /boot/extra https://slackware.uk/slackware/slackware64-15.0/slackware64/l/elfutils-0.186-x86_64-1.txz 2>/dev/null \
|| { echo " [!] Error while downloading elfutils, please download it and install it manually."; exit 1; }
echo ""
if upgradepkg --install-new /boot/extra/elfutils-0.186-x86_64-1.txz
then
echo " [*] Elfutils installed and will be installed automatically on boot"
else
echo " [!] Error while installing, check logs..."
exit 1
fi
fi
echo " [~] Sleeping for 60 seconds before continuing..."
echo " [i] The script is waiting until the boot process settles down."
for i in {60..1}; do
printf "\r [~] %d seconds remaining" "$i"
sleep 1
done
printf "\n"
create_token () {
echo " [>] Creating new token..."
if ${PING} -c1 ${DLS_IP} > /dev/null 2>&1
then
# curl --insecure -L -X GET https://${DLS_IP}:${DLS_PORT}/-/client-token -o ${TOKEN_PATH}/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok || { echo " [!] Could not get the token, please check the server."; exit 1;}
wget -q -nc -4c --no-check-certificate --show-progress --progress=bar:force:noscroll -O "${TOKEN_PATH}"/client_configuration_token_"$(date '+%d-%m-%Y-%H-%M-%S')".tok https://${DLS_IP}:${DLS_PORT}/-/client-token \
|| { echo " [!] Could not get the token, please check the server."; exit 1;}
chmod 744 "${TOKEN_PATH}"/*.tok || { echo " [!] Could not chmod the tokens."; exit 1; }
echo ""
echo " [*] Token downloaded and stored in ${TOKEN_PATH}."
else
echo " [!] Could not get token, DLS server unavailable ."
exit 1
fi
}
setup_run () {
echo " [>] Setting up gridd.conf"
cp /etc/nvidia/gridd.conf.template /etc/nvidia/gridd.conf || { echo " [!] Error configuring gridd.conf, did you install the drivers correctly?"; exit 1; }
sed -i 's/FeatureType=0/FeatureType=1/g' /etc/nvidia/gridd.conf
echo "ClientConfigTokenPath=${TOKEN_PATH}" >> /etc/nvidia/gridd.conf
echo " [>] Creating /var/lib/nvidia folder structure"
mkdir -p /var/lib/nvidia/GridLicensing
echo " [>] Starting nvidia-gridd"
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [!] nvidia-gridd service is running. Closing."
sh /usr/lib/nvidia/sysv/nvidia-gridd stop
stop_exit_code=$?
if [ $stop_exit_code -eq 0 ]; then
echo " [*] nvidia-gridd service stopped successfully."
else
echo " [!] Error while stopping nvidia-gridd service."
exit 1
fi
# Kill the service if it does not close
if pgrep nvidia-gridd >/dev/null 2>&1; then
kill -9 "$(pgrep nvidia-gridd)" || {
echo " [!] Error while closing nvidia-gridd service"
exit 1
}
fi
echo " [*] Restarting nvidia-gridd service."
sh /usr/lib/nvidia/sysv/nvidia-gridd start
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
else
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
exit 1
fi
else
sh /usr/lib/nvidia/sysv/nvidia-gridd start
if pgrep nvidia-gridd >/dev/null 2>&1; then
echo " [*] Service started, PID: $(pgrep nvidia-gridd)"
else
echo -e " [!] Error while starting nvidia-gridd service. Use strace -f nvidia-gridd to debug.\n [i] Check if elfutils is installed.\n [i] strace is not installed by default."
exit 1
fi
fi
}
for token in "${TOKEN_PATH}"/*; do
if [ "${token: -4}" == ".tok" ]
then
echo " [*] Tokens found..."
setup_run
else
echo " [!] No Tokens found..."
create_token
setup_run
fi
done
while true; do
if nvidia-smi -q | grep "Expiry" >/dev/null 2>&1; then
echo " [>] vGPU licensed!"
echo " [i] $(nvidia-smi -q | grep "Expiry")"
break
else
echo -ne " [>] vGPU not licensed yet... Checking again in 5 seconds\c"
for i in {1..5}; do
sleep 1
echo -ne ".\c"
done
echo -ne "\r\c"
fi
done
echo " [>] Done..."
exit 0


@@ -1,7 +1,9 @@
version: "2"
plugins:
bandit:
enabled: true
sonar-python:
enabled: true
pylint:
enabled: true
config:
tests_patterns:
- test/**

.gitignore

@@ -1,6 +1,6 @@
.DS_Store
venv/
.idea/
app/*.sqlite*
*.sqlite
app/cert/*.*
.pytest_cache


@@ -1,38 +1,58 @@
include:
- template: Jobs/Code-Quality.gitlab-ci.yml
- template: Jobs/Secret-Detection.gitlab-ci.yml
- template: Jobs/SAST.gitlab-ci.yml
- template: Jobs/Container-Scanning.gitlab-ci.yml
- template: Jobs/Dependency-Scanning.gitlab-ci.yml
cache:
key: one-key-to-rule-them-all
variables:
DOCKER_BUILDX_PLATFORM: "linux/amd64,linux/arm64"
build:docker:
image: docker:dind
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
# deployment is in "deploy:docker:"
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- Dockerfile
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- requirements.txt
tags: [ docker ]
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env # COMMIT=`git rev-parse HEAD`
- docker buildx inspect
- docker buildx create --use
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${CI_BUILD_REF}
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH/$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE --push .
- docker buildx imagetools inspect $IMAGE
- echo "CS_IMAGE=$IMAGE" > container_scanning.env
artifacts:
reports:
dotenv: container_scanning.env
build:apt:
image: debian:bookworm-slim
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
variables:
VERSION: $CI_COMMIT_REF_NAME
- if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
changes:
- app/**/*
- .DEBIAN/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- .gitlab-ci.yml
variables:
VERSION: "0.0.1"
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
- source version.env
- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
# install build dependencies
- apt-get update -qq && apt-get install -qq -y build-essential
# create build directory for .deb sources
@@ -53,7 +73,7 @@ build:apt:
# cd into "build/"
- cd build/
script:
# set version based on value in "$VERSION" (which is set above from version.env)
# set version based on value in "$CI_COMMIT_REF_NAME"
- sed -i -E 's/(Version\:\s)0.0/\1'"$VERSION"'/g' DEBIAN/control
# build
- dpkg -b . build.deb
@@ -68,14 +88,18 @@ build:pacman:
interruptible: true
stage: build
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
variables:
VERSION: $CI_COMMIT_REF_NAME
- if: ($CI_PIPELINE_SOURCE == 'merge_request_event') || ($CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH)
changes:
- app/**/*
- .PKGBUILD/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- .gitlab-ci.yml
variables:
VERSION: "0.0.1"
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
#- echo -e "VERSION=$VERSION\nCOMMIT=$CI_COMMIT_SHA" > version.env
# install build dependencies
- pacman -Syu --noconfirm git
# create a build-user because "makepkg" don't like root user
@@ -90,41 +114,70 @@ build:pacman:
# download dependencies
- source PKGBUILD && pacman -Syu --noconfirm --needed --asdeps "${makedepends[@]}" "${depends[@]}"
# build
- sudo -u build makepkg -s
- sudo --preserve-env -u build makepkg -s
artifacts:
expire_in: 1 week
paths:
- "*.pkg.tar.zst"
test:
image: python:3.11-slim-bullseye
test:python:
image: $IMAGE
stage: test
interruptible: true
rules:
- if: $CI_COMMIT_BRANCH
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
changes:
- app/**/*
- test/**/*
variables:
DATABASE: sqlite:///../app/db.sqlite
parallel:
matrix:
- IMAGE:
# https://devguide.python.org/versions/#supported-versions
# - python:3.14-rc-alpine # EOL 2030-10 => uvicorn does not support 3.14 yet
- python:3.13-alpine # EOL 2029-10
- python:3.12-alpine # EOL 2028-10
- python:3.11-alpine # EOL 2027-10
# - python:3.10-alpine # EOL 2026-10 => ImportError: cannot import name 'UTC' from 'datetime'
# - python:3.9-alpine # EOL 2025-10 => ImportError: cannot import name 'UTC' from 'datetime'
before_script:
- apk --no-cache add openssl
- python3 -m venv venv
- source venv/bin/activate
- pip install --upgrade pip
- pip install -r requirements.txt
- pip install pytest httpx
- pip install pytest pytest-cov pytest-custom_exit_code httpx
- mkdir -p app/cert
- openssl genrsa -out app/cert/instance.private.pem 2048
- openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
- cd test
script:
- pytest main.py
- python -m pytest main.py --junitxml=report.xml
artifacts:
reports:
dotenv: version.env
junit: ['**/report.xml']
.test:linux:
test:apt:
image: $IMAGE
stage: test
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- .DEBIAN/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
parallel:
matrix:
- IMAGE:
- debian:trixie-slim # EOL: t.b.a.
- debian:bookworm-slim # EOL: June 06, 2026
- debian:bookworm-slim # EOL: June 06, 2026
- ubuntu:24.04 # EOL: April 2036
- ubuntu:24.10
needs:
- job: build:apt
artifacts: true
@@ -156,22 +209,15 @@ test:
- apt-get purge -qq -y fastapi-dls
- apt-get autoremove -qq -y && apt-get clean -qq
test:debian:
extends: .test:linux
image: debian:bookworm-slim
test:ubuntu:
extends: .test:linux
image: ubuntu:22.10
test:archlinux:
test:pacman:archlinux:
image: archlinux:base
rules:
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
changes:
- app/**/*
- .PKGBUILD/**/*
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
- .gitlab-ci.yml
needs:
- job: build:pacman
artifacts: true
@@ -179,42 +225,101 @@ test:archlinux:
- pacman -Sy
- pacman -U --noconfirm *.pkg.tar.zst
.deploy:
code_quality:
variables:
SOURCE_CODE: app
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
- if: $CODE_QUALITY_DISABLED
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
secret_detection:
rules:
- if: $SECRET_DETECTION_DISABLED
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
before_script:
- git config --global --add safe.directory $CI_PROJECT_DIR
semgrep-sast:
rules:
- if: $SAST_DISABLED
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
test_coverage:
# extends: test
image: python:3.12-slim-bookworm
allow_failure: true
stage: test
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
variables:
DATABASE: sqlite:///../app/db.sqlite
before_script:
- apt-get update && apt-get install -y python3-dev gcc
- pip install -r requirements.txt
- pip install pytest pytest-cov pytest-custom_exit_code httpx
- mkdir -p app/cert
- openssl genrsa -out app/cert/instance.private.pem 2048
- openssl rsa -in app/cert/instance.private.pem -outform PEM -pubout -out app/cert/instance.public.pem
- cd test
script:
- coverage run -m pytest main.py --junitxml=report.xml --suppress-no-test-exit-code
- coverage report
- coverage xml
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
artifacts:
reports:
junit: [ '**/report.xml' ]
coverage_report:
coverage_format: cobertura
path: '**/coverage.xml'
container_scanning:
dependencies: [ build:docker ]
rules:
- if: $CONTAINER_SCANNING_DISABLED
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
gemnasium-python-dependency_scanning:
rules:
- if: $DEPENDENCY_SCANNING_DISABLED
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
deploy:docker:
extends: .deploy
image: docker:dind
stage: deploy
tags: [ docker ]
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
before_script:
- echo "COMMIT=${CI_COMMIT_SHA}" >> version.env
- source version.env
- echo "Building docker image for commit ${COMMIT} with version ${VERSION}"
- echo "Building docker image for commit $CI_COMMIT_SHA with version $CI_COMMIT_REF_NAME"
- docker buildx inspect
- docker buildx create --use
script:
- echo "GitLab-Registry"
- echo "========== GitLab-Registry =========="
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
- docker build . --tag ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:${VERSION}
- docker push ${CI_REGISTRY}/${CI_PROJECT_PATH}/${CI_BUILD_REF_NAME}:latest
- echo "Docker-Hub"
- IMAGE=$CI_REGISTRY/$CI_PROJECT_PATH
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME --push .
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest --push .
- echo "========== Docker-Hub =========="
- docker login -u $PUBLIC_REGISTRY_USER -p $PUBLIC_REGISTRY_TOKEN
- docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
- docker build . --tag $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:${VERSION}
- docker push $PUBLIC_REGISTRY_USER/${CI_PROJECT_NAME}:latest
- IMAGE=$PUBLIC_REGISTRY_USER/$CI_PROJECT_NAME
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:$CI_COMMIT_REF_NAME --push .
- docker buildx build --progress=plain --platform $DOCKER_BUILDX_PLATFORM --build-arg VERSION=$CI_COMMIT_REF_NAME --build-arg COMMIT=$CI_COMMIT_SHA --tag $IMAGE:latest --push .
deploy:apt:
# doc: https://git.collinwebdesigns.de/help/user/packages/debian_repository/index.md#install-a-package
extends: .deploy
image: debian:bookworm-slim
stage: deploy
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
needs:
- job: build:apt
artifacts: true
@@ -251,21 +356,19 @@ deploy:apt:
- 'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file ${EXPORT_NAME} "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${PACKAGE_NAME}/${PACKAGE_VERSION}/${EXPORT_NAME}"'
deploy:pacman:
extends: .deploy
image: archlinux:base-devel
stage: deploy
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_TAG
needs:
- job: build:pacman
artifacts: true
script:
- source .PKGBUILD/PKGBUILD
- source version.env
# fastapi-dls-1.0-1-any.pkg.tar.zst
- BUILD_NAME=${pkgname}-${VERSION}-${pkgrel}-any.pkg.tar.zst
- BUILD_NAME=${pkgname}-${CI_COMMIT_REF_NAME}-${pkgrel}-any.pkg.tar.zst
- PACKAGE_NAME=${pkgname}
- PACKAGE_VERSION=${VERSION}
- PACKAGE_VERSION=${CI_COMMIT_REF_NAME}
- PACKAGE_ARCH=any
- EXPORT_NAME=${BUILD_NAME}
- 'echo "PACKAGE_NAME: ${PACKAGE_NAME}"'
@@ -277,19 +380,15 @@ deploy:pacman:
release:
image: registry.gitlab.com/gitlab-org/release-cli:latest
stage: .post
needs:
- job: test
artifacts: true
needs: [ build:docker, build:apt, build:pacman ]
rules:
- if: $CI_COMMIT_TAG
when: never
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
script:
- echo "Running release-job for $VERSION"
- echo "Running release-job for $CI_COMMIT_TAG"
release:
name: $CI_PROJECT_TITLE $VERSION
description: Release of $CI_PROJECT_TITLE version $VERSION
tag_name: $VERSION
name: $CI_PROJECT_TITLE $CI_COMMIT_TAG
description: Release of $CI_PROJECT_TITLE version $CI_COMMIT_TAG
tag_name: $CI_COMMIT_TAG
ref: $CI_COMMIT_SHA
assets:
links:


@@ -1,17 +1,20 @@
FROM python:3.11-alpine
FROM python:3.12-alpine
ARG VERSION
ARG COMMIT=""
RUN echo -e "VERSION=$VERSION\nCOMMIT=$COMMIT" > /version.env
COPY requirements.txt /tmp/requirements.txt
RUN apk update \
&& apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev \
&& apk add --no-cache curl postgresql postgresql-dev mariadb-connector-c-dev sqlite-dev \
&& apk add --no-cache --virtual build-deps gcc g++ python3-dev musl-dev pkgconfig \
&& apk add --no-cache curl postgresql postgresql-dev mariadb-dev sqlite-dev \
&& pip install --no-cache-dir --upgrade uvicorn \
&& pip install --no-cache-dir psycopg2==2.9.5 mysqlclient==2.1.1 pysqlite3==0.5.0 \
&& pip install --no-cache-dir psycopg2==2.9.10 mysqlclient==2.2.7 pysqlite3==0.5.4 \
&& pip install --no-cache-dir -r /tmp/requirements.txt \
&& apk del build-deps
COPY app /app
COPY version.env /version.env
COPY README.md /README.md
HEALTHCHECK --start-period=30s --interval=10s --timeout=5s --retries=3 CMD curl --insecure --fail https://localhost/-/health || exit 1

FAQ.md

@@ -1,17 +0,0 @@
# FAQ
## `Failed to acquire license from <ip> (Info: <license> - Error: The allowed time to process response has expired)`
- Are your timezone settings correct on fastapi-dls **and your guest**?
- Did you download the client-token more than an hour ago?
Please download a new client-token. The guest has to register within an hour after the client-token was created.
## `jose.exceptions.JWTError: Signature verification failed.`
- Did you recreate `instance.public.pem` / `instance.private.pem`?
Then you have to download a **new** client-token on each of your guests.

README.md

@@ -2,22 +2,63 @@
Minimal Delegated License Service (DLS).
Compatibility tested with official DLS 2.0.1.
> [!note] Compatibility
> Compatibility tested with official NLS 2.0.1, 2.1.0, 3.1.0, 3.3.1, 3.4.0. For Driver compatibility
> see [compatibility matrix](#vgpu-software-compatibility-matrix).
> [!warning] 18.x Drivers are not yet supported!
> Drivers are only supported up to the **17.x releases**.
This service can be used without internet connection.
Only the clients need a connection to this service on configured port.
[[_TOC_]]
**Official Links**
* https://git.collinwebdesigns.de/oscar.krause/fastapi-dls (Private Git)
* https://gitea.publichub.eu/oscar.krause/fastapi-dls (Public Git)
* https://hub.docker.com/r/collinwebdesigns/fastapi-dls (Docker-Hub `collinwebdesigns/fastapi-dls:latest`)
*All other repositories are forks! (which is not bad; noted just for information and bug reports)*
[Releases & Release Notes](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/releases)
**Further Reading**
* [NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox) - This document serves as a guide to install NVIDIA vGPU host drivers on the latest Proxmox VE version
* [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock) - Unlock vGPU functionality for consumer-grade Nvidia GPUs.
* [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q) - Guide for `vgpu_unlock`
* [Proxmox 8 vGPU in VMs and LXC Containers](https://medium.com/@dionisievldulrincz/proxmox-8-vgpu-in-vms-and-lxc-containers-4146400207a3) - Install *Merged Drivers* for using in Proxmox VMs and LXCs
* [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/) - Also known as `proxmox-installer.sh`
---
[TOC]
# Setup (Service)
**System requirements**
- 256 MB RAM
- 4 GB disk space
- *IPv6 may need to be disabled*
Tested with Ubuntu 22.10 (EOL!) (from Proxmox templates); in practice it consumes about 100 MB RAM and 750 MB disk space.
**Prepare your system**
- Make sure your timezone is set correctly on your fastapi-dls server and your clients
This guide does not show how to install vGPU host drivers! Look at the official documentation shipped with the driver
releases.
## Docker
Docker-Images are available here:
Docker images are available here for Intel/AMD (`amd64`) and ARM (`arm64`):
- [Docker-Hub](https://hub.docker.com/repository/docker/collinwebdesigns/fastapi-dls): `collinwebdesigns/fastapi-dls:latest`
- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry): `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls/main:latest`
- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/container_registry): `registry.git.collinwebdesigns.de/oscar.krause/fastapi-dls:latest`
The images include database drivers for `postgres`, `mariadb` and `sqlite`.
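As a hedged reference (connection-string formats only; hostnames, credentials and database names below are placeholders, not defaults), typical `DATABASE` values for the bundled drivers look like this:

```shell
# sqlite (file-based, default; matches the compose example below)
DATABASE=sqlite:////app/database/db.sqlite

# postgres via the bundled psycopg2 driver
DATABASE=postgresql://user:password@db-host:5432/fastapi_dls

# mariadb / mysql via the bundled mysqlclient driver
DATABASE=mysql+mysqldb://user:password@db-host:3306/fastapi_dls
```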
**Run this on the Docker-Host**
@@ -43,16 +84,20 @@ docker run -e DLS_URL=`hostname -i` -e DLS_PORT=443 -p 443:443 -v $WORKING_DIR:/
**Docker-Compose / Deploy stack**
Goto [`docker-compose.yml`](docker-compose.yml) for more advanced example (with reverse proxy usage).
See [`examples`](examples) directory for more advanced examples (with reverse proxy usage).
> Adjust `REQUIRED` variables as needed
```yaml
version: '3.9'
x-dls-variables: &dls-variables
TZ: Europe/Berlin # REQUIRED, set your timezone correctly on fastapi-dls AND YOUR CLIENTS !!!
DLS_URL: localhost # REQUIRED, change to your ip or hostname
DLS_PORT: 443
LEASE_EXPIRE_DAYS: 90
LEASE_EXPIRE_DAYS: 90 # 90 days is maximum
DATABASE: sqlite:////app/database/db.sqlite
DEBUG: false
services:
dls:
@@ -65,14 +110,22 @@ services:
volumes:
- /opt/docker/fastapi-dls/cert:/app/cert
- dls-db:/app/database
logging: # optional, limits log file size and rotation
driver: "json-file"
options:
max-file: 5
max-size: 10m
volumes:
dls-db:
```
## Debian/Ubuntu (manual method using `git clone` and python virtual environment)
## Debian / Ubuntu / macOS (manual method using `git clone` and python virtual environment)
Tested on `Debian 11 (bullseye)`; Ubuntu may also work.
Tested on `Debian 11 (bullseye)`, `Debian 12 (bookworm)` and `macOS Ventura (13.6)`; Ubuntu may also work.
**Please note that setup on macOS differs from Debian-based systems.**
**Make sure you are logged in as root.**
**Install requirements**
@@ -98,7 +151,7 @@ chown -R www-data:www-data $WORKING_DIR
```shell
WORKING_DIR=/opt/fastapi-dls/app/cert
mkdir $WORKING_DIR
mkdir -p $WORKING_DIR
cd $WORKING_DIR
# create instance private and public key for signing JWTs
openssl genrsa -out $WORKING_DIR/instance.private.pem 2048
@@ -114,12 +167,17 @@ This is only to test whether the service starts successfully.
```shell
cd /opt/fastapi-dls/app
sudo -u www-data /opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app
# or
su - www-data -c "/opt/fastapi-dls/venv/bin/uvicorn main:app --app-dir=/opt/fastapi-dls/app"
```
**Create config file**
> Adjust `DLS_URL` as needed (accessing from LAN won't work with 127.0.0.1)
```shell
mkdir /etc/fastapi-dls
cat <<EOF >/etc/fastapi-dls/env
DLS_URL=127.0.0.1
DLS_PORT=443
@@ -164,16 +222,125 @@ EOF
Now run `systemctl daemon-reload`. After that you can start the service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
## Debian/Ubuntu (using `dpkg`)
## openSUSE Leap (manual method using `git clone` and python virtual environment)
Tested on `openSUSE Leap 15.4`, openSUSE Tumbleweed may also work.
**Install requirements**
```shell
zypper in -y python310 python3-virtualenv python3-pip
```
**Install FastAPI-DLS**
```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
mkdir -p ${BASE_DIR}
cd ${BASE_DIR}
git clone https://git.collinwebdesigns.de/oscar.krause/fastapi-dls .
python3.10 -m venv venv
source venv/bin/activate
pip install -r requirements.txt
deactivate
useradd -r ${SERVICE_USER} -M -d /opt/fastapi-dls
chown -R ${SERVICE_USER} ${BASE_DIR}
```
**Create keypair and webserver certificate**
```shell
CERT_DIR=${BASE_DIR}/app/cert
SERVICE_USER=dls
mkdir -p ${CERT_DIR}
cd ${CERT_DIR}
# create instance private and public key for signing JWTs
openssl genrsa -out ${CERT_DIR}/instance.private.pem 2048
openssl rsa -in ${CERT_DIR}/instance.private.pem -outform PEM -pubout -out ${CERT_DIR}/instance.public.pem
# create ssl certificate for integrated webserver (uvicorn) - because clients rely on ssl
openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout ${CERT_DIR}/webserver.key -out ${CERT_DIR}/webserver.crt
chown -R ${SERVICE_USER} ${CERT_DIR}
```
**Test Service**
This is only to test whether the service starts successfully.
```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
cd ${BASE_DIR}
sudo -u ${SERVICE_USER} ${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app
# or
su - ${SERVICE_USER} -c "${BASE_DIR}/venv/bin/uvicorn main:app --app-dir=${BASE_DIR}/app"
```
**Create config file**
> Adjust `DLS_URL` as needed (accessing from LAN won't work with 127.0.0.1)
```shell
BASE_DIR=/opt/fastapi-dls
mkdir -p /etc/fastapi-dls
cat <<EOF >/etc/fastapi-dls/env
DLS_URL=127.0.0.1
DLS_PORT=443
LEASE_EXPIRE_DAYS=90
DATABASE=sqlite:///${BASE_DIR}/app/db.sqlite
EOF
```
**Create service**
```shell
BASE_DIR=/opt/fastapi-dls
SERVICE_USER=dls
cat <<EOF >/etc/systemd/system/fastapi-dls.service
[Unit]
Description=Service for fastapi-dls vGPU licensing service
After=network.target
[Service]
User=${SERVICE_USER}
AmbientCapabilities=CAP_NET_BIND_SERVICE
WorkingDirectory=${BASE_DIR}/app
EnvironmentFile=/etc/fastapi-dls/env
ExecStart=${BASE_DIR}/venv/bin/uvicorn main:app \\
--env-file /etc/fastapi-dls/env \\
--host \$DLS_URL --port \$DLS_PORT \\
--app-dir ${BASE_DIR}/app \\
--ssl-keyfile ${BASE_DIR}/app/cert/webserver.key \\
--ssl-certfile ${BASE_DIR}/app/cert/webserver.crt \\
--proxy-headers
Restart=always
KillSignal=SIGQUIT
Type=simple
NotifyAccess=all
[Install]
WantedBy=multi-user.target
EOF
```
Now run `systemctl daemon-reload`. After that you can start the service
with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
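Combined, the commands from the paragraph above:

```shell
systemctl daemon-reload
systemctl start fastapi-dls.service
systemctl enable fastapi-dls.service
```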
## Debian / Ubuntu (using `dpkg` / `apt`)
Packages are available here:
- [GitLab-Registry](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/packages)
Successfully tested with:
Successfully tested with (**LTS versions**):
- Debian 12 (Bookworm) (works but not recommended because it is currently in *testing* state)
- Ubuntu 22.10 (Kinetic Kudu)
- **Debian 12 (Bookworm)** (EOL: June 06, 2026)
- *Ubuntu 22.10 (Kinetic Kudu)* (EOL: July 20, 2023)
- *Ubuntu 23.04 (Lunar Lobster)* (EOL: January 2024)
- *Ubuntu 23.10 (Mantic Minotaur)* (EOL: July 2024)
- **Ubuntu 24.04 (Noble Numbat)** (EOL: Apr 2029)
- *Ubuntu 24.10 (Oracular Oriole)* (EOL: Jul 2025)
Not working with:
@@ -194,6 +361,7 @@ apt-get install -f --fix-missing
```
Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
Now you have to edit `/etc/fastapi-dls/env` as needed.
## ArchLinux (using `pacman`)
@@ -215,6 +383,27 @@ pacman -U --noconfirm fastapi-dls.pkg.tar.zst
```
Start with `systemctl start fastapi-dls.service` and enable autostart with `systemctl enable fastapi-dls.service`.
Now you have to edit `/etc/default/fastapi-dls` as needed.
## unRAID
1. Download [this xml file](.UNRAID/FastAPI-DLS.xml)
2. Put it in /boot/config/plugins/dockerMan/templates-user/
3. Go to the Docker page, scroll down to `Add Container`, click on the template list and choose `FastAPI-DLS`
4. Open a terminal/ssh session and follow the instructions in the overview description
5. Setup your container `IP`, `Port`, `DLS_URL` and `DLS_PORT`
6. Apply and let it boot up
*unRAID users must also make sure they have "Host access to custom networks" enabled if unRAID itself is the vGPU guest.*
Continue [here](#unraid-guest) for docker guest setup.
## NixOS
Thanks to [@mrzenc](https://github.com/mrzenc) for [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos).
> [!note] Native NixOS-Package
> There is a [pull request](https://github.com/NixOS/nixpkgs/pull/358647) which adds fastapi-dls into nixpkgs.
## Let's Encrypt Certificate (optional)
@@ -234,21 +423,21 @@ After first success you have to replace `--issue` with `--renew`.
# Configuration
| Variable | Default | Usage |
|------------------------|----------------------------------------|------------------------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS` | `1` | Client auth-token validity (used to authenticate the client against the API, **not the `.tok` file!**) |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA` | `<app-dir>/cert/instance.private.pem` | Site-wide private RSA key for signing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
| Variable | Default | Usage |
|--------------------------|----------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------|
| `DEBUG` | `false` | Toggles `fastapi` debug mode |
| `DLS_URL` | `localhost` | Used in client-token to tell guest driver where dls instance is reachable |
| `DLS_PORT` | `443` | Used in client-token to tell guest driver where dls instance is reachable |
| `TOKEN_EXPIRE_DAYS` | `1` | Client auth-token validity (used to authenticate the client against the API, **not the `.tok` file!**) |
| `LEASE_EXPIRE_DAYS` | `90` | Lease time in days |
| `LEASE_RENEWAL_PERIOD` | `0.15` | The percentage of the lease period that must elapse before a licensed client can renew a license \*1 |
| `DATABASE` | `sqlite:///db.sqlite` | See [official SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html) |
| `CORS_ORIGINS` | `https://{DLS_URL}` | Sets `Access-Control-Allow-Origin` header (comma separated string) \*2 |
| `SITE_KEY_XID` | `00000000-0000-0000-0000-000000000000` | Site identification uuid |
| `INSTANCE_REF` | `10000000-0000-0000-0000-000000000001` | Instance identification uuid |
| `ALLOTMENT_REF` | `20000000-0000-0000-0000-000000000001` | Allotment identification uuid |
| `INSTANCE_KEY_RSA` | `<app-dir>/cert/instance.private.pem` | Site-wide private RSA key for signing JWTs \*3 |
| `INSTANCE_KEY_PUB` | `<app-dir>/cert/instance.public.pem` | Site-wide public key \*3 |
\*1 For example, if the lease period is one day and the renewal period is 20%, the client attempts to renew its license
every 4.8 hours. If network connectivity is lost, the loss of connectivity is detected during license renewal and the
@@ -256,17 +445,14 @@ client has 19.2 hours in which to re-establish connectivity before its license e
\*2 Always use `https`, since guest-drivers only support secure connections!
\*3 If you recreate instance keys you need to **recreate client-token for each guest**!
\*3 If you recreate your instance keys you need to **recreate client-token for each guest**!
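As a worked example for footnote \*1, here is a minimal Python sketch of the renewal calculation (values taken from the example above; it mirrors `Lease.calculate_renewal` in `app/orm.py`):

```python
from datetime import timedelta

lease_expire_delta = timedelta(days=1)  # lease period from the example in *1
lease_renewal_period = 0.20             # 20 % renewal period

# time after which the client attempts to renew its license
renew_after = timedelta(seconds=lease_expire_delta.total_seconds() * lease_renewal_period)
grace = lease_expire_delta - renew_after  # time left to re-establish connectivity

print(renew_after)  # 4:48:00  -> renew every 4.8 hours
print(grace)        # 19:12:00 -> 19.2 hours before the license expires
```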
# Setup (Client)
**The token file has to be copied! It's not enough to copy & paste the file contents, because the file can contain special characters.**
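A hedged example of fetching the token directly on a Linux guest with `curl` (the `GET /-/client-token` endpoint is listed under [API Endpoints](#api-endpoints); the target directory is an assumption and depends on your guest-driver setup):

```shell
# download the token as a file instead of copy & pasting its contents
curl --insecure -L -X GET https://<dls-hostname>:443/-/client-token \
  -o /etc/nvidia/ClientConfigToken/client_configuration_token_$(date '+%d-%m-%Y-%H-%M-%S').tok
```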
Successfully tested with these package versions:
- `14.3` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `513.91`)
- `14.4` (Linux-Host: `510.108.03`, Linux-Guest: `510.108.03`, Windows-Guest: `514.08`)
- `15.0` (Linux-Host: `525.60.12`, Linux-Guest: `525.60.13`, Windows-Guest: `527.41`)
This guide does not show how to install vGPU guest drivers! Look at the official documentation shipped with the driver
releases.
## Linux
@@ -318,7 +504,7 @@ Restart-Service NVDisplay.ContainerLocalSystem
Check licensing status:
```shell
& 'C:\Program Files\NVIDIA Corporation\NVSMI\nvidia-smi.exe' -q | Select-String "License"
& 'nvidia-smi' -q | Select-String "License"
```
Output should be something like:
@@ -330,33 +516,44 @@ vGPU Software Licensed Product
Done. For more information check [troubleshoot section](#troubleshoot).
# Endpoints
## unRAID Guest
### `GET /`
1. Make sure you create a folder on a Linux filesystem (BTRFS/XFS/EXT4...). I recommend `/mnt/user/system/nvidia` (this is where docker and libvirt preferences are saved, so it's a good place for it)
2. Edit the script and set your `DLS_IP`, `DLS_PORT` and `TOKEN_PATH` properly
3. Install the `User Scripts` plugin from *Community Apps* (the Apps page; search for "User Scripts Unraid" if you're not using CA)
4. Go to `Settings > Users Scripts > Add New Script`
5. Give it a name (preferably without spaces)
6. Click on the *gear icon* to the left of the script name, then edit the script
7. Paste the script and save
8. Set schedule to `At First Array Start Only`
9. Click on Apply
# API Endpoints
<details>
<summary>show</summary>
**`GET /`**
Redirect to `/-/readme`.
### `GET /-/health`
**`GET /-/health`**
Status endpoint, used for *healthcheck*.
### `GET /-/config`
**`GET /-/config`**
Shows current runtime environment variables and their values.
### `GET /-/readme`
**`GET /-/readme`**
HTML rendered README.md.
### `GET /-/docs`, `GET /-/redoc`
OpenAPI specifications rendered from `GET /-/openapi.json`.
### `GET /-/manage`
**`GET /-/manage`**
Shows a very basic UI to delete origins or leases.
### `GET /-/origins?leases=false`
**`GET /-/origins?leases=false`**
List registered origins.
@@ -364,11 +561,11 @@ List registered origins.
|-----------------|---------|--------------------------------------|
| `leases` | `false` | Include referenced leases per origin |
### `DELETE /-/origins`
**`DELETE /-/origins`**
Deletes all origins and their leases.
### `GET /-/leases?origin=false`
**`GET /-/leases?origin=false`**
List current leases.
@@ -376,22 +573,29 @@ List current leases.
|-----------------|---------|-------------------------------------|
| `origin` | `false` | Include referenced origin per lease |
### `DELETE /-/lease/{lease_ref}`
**`DELETE /-/lease/{lease_ref}`**
Deletes a lease.
### `GET /-/client-token`
**`GET /-/client-token`**
Generates a client token (see [installation](#installation)).
### Others
**Others**
There are many other internal API endpoints for handling authentication and the lease process.
</details>
# Troubleshoot
# Troubleshoot / Debug
**Please make sure that fastapi-dls and your guests use the same timezone!**
You may have to disable IPv6 on the machine running FastAPI-DLS.
## Docker
Logs are available with `docker logs <container>`. To get the correct container-id use `docker container ls` or `docker ps`.
## Linux
Logs are available with `journalctl -u nvidia-gridd -f`.
@@ -402,11 +606,26 @@ Logs are available in `C:\Users\Public\Documents\Nvidia\LoggingLog.NVDisplay.Con
# Known Issues
## Generic
### `Failed to acquire license from <ip> (Info: <license> - Error: The allowed time to process response has expired)`
- Are your timezone settings correct on fastapi-dls **and your guest**?
- Did you download the client-token more than an hour ago?
Please download a new client-token. The guest has to register within an hour after the client-token was created.
### `jose.exceptions.JWTError: Signature verification failed.`
- Did you recreate `instance.public.pem` / `instance.private.pem`?
Then you have to download a **new** client-token on each of your guests.
## Linux
### `uvicorn.error:Invalid HTTP request received.`
### Invalid HTTP request
This message can be ignored.
This error message: `uvicorn.error:Invalid HTTP request received.` can be ignored.
- Ref. https://github.com/encode/uvicorn/issues/441
@@ -449,7 +668,7 @@ only
gets a valid local license.
<details>
<summary>Log</summary>
<summary>Log example</summary>
**Display-Container-LS**
@@ -515,7 +734,7 @@ The error message can safely be ignored (since we have no license limitation :P)
<0>:End Logging
```
#### log with nginx as reverse proxy (see [docker-compose.yml](docker-compose.yml))
#### log with nginx as reverse proxy (see [docker-compose-http-and-https.yml](examples/docker-compose-http-and-https.yml))
```
<1>:NLS initialized
@@ -532,9 +751,57 @@ The error message can safely be ignored (since we have no license limitation :P)
</details>
# vGPU Software Compatibility Matrix
**18.x Drivers are not supported on FastAPI-DLS Versions < 1.6.0**
<details>
<summary>Show Table</summary>
Successfully tested with these package versions.
| vGPU Software | Driver Branch | Linux vGPU Manager | Linux Driver | Windows Driver | Release Date | EOL Date |
|:-------------:|:-------------:|--------------------|--------------|----------------|--------------:|--------------:|
| `17.5` | R550 | `550.144.02` | `550.144.03` | `553.62` | January 2025 | June 2025 |
| `17.4` | R550 | `550.127.06` | `550.127.05` | `553.24` | October 2024 | |
| `17.3` | R550 | `550.90.05` | `550.90.07` | `552.74` | July 2024 | |
| `17.2` | R550 | `550.90.05` | `550.90.07` | `552.55` | June 2024 | |
| `17.1` | R550 | `550.54.16` | `550.54.15` | `551.78` | March 2024 | |
| `17.0` | R550 | `550.54.10` | `550.54.14` | `551.61` | February 2024 | |
| `16.9` | R535 | `535.230.02` | `535.216.01` | `539.19` | October 2024 | July 2026 |
| `16.8` | R535 | `535.216.01` | `535.216.01` | `538.95` | October 2024 | |
| `16.7` | R535 | `535.183.04` | `535.183.06` | `538.78` | July 2024 | |
| `16.6` | R535 | `535.183.04` | `535.183.01` | `538.67` | June 2024 | |
| `16.5` | R535 | `535.161.05` | `535.161.08` | `538.46` | February 2024 | |
| `16.4` | R535 | `535.161.05` | `535.161.07` | `538.33` | February 2024 | |
| `16.3` | R535 | `535.154.02` | `535.154.05` | `538.15` | January 2024 | |
| `16.2` | R535 | `535.129.03` | `535.129.03` | `537.70` | October 2023 | |
| `16.1` | R535 | `535.104.06` | `535.104.05` | `537.13` | August 2023 | |
| `16.0` | R535 | `535.54.06` | `535.54.03` | `536.22` | July 2023 | |
| `15.4` | R525 | `525.147.01` | `525.147.05` | `529.19` | June 2023 | December 2023 |
| `14.4` | R510 | `510.108.03` | `510.108.03` | `514.08` | December 2022 | February 2023 |
</details>
- https://docs.nvidia.com/grid/index.html
- https://docs.nvidia.com/grid/gpus-supported-by-vgpu.html
*To get the latest drivers, visit Nvidia or search in the Discord channel `GPU Unlocking` (Server-ID: `829786927829745685`)
on channel `licensing`.*
# Credits
Thanks to the vGPU community and everyone who uses this project and reports bugs.
Special thanks to @samicrusader who created the build file for ArchLinux.
Special thanks to:
- `samicrusader` who created the build file for **ArchLinux**
- `cyrus` who wrote the section for **openSUSE**
- `midi` who wrote the section for **unRAID**
- `polloloco` who wrote the *[NVIDIA vGPU Guide](https://gitlab.com/polloloco/vgpu-proxmox)*
- `DualCoder` who created the `vgpu_unlock` functionality [vgpu_unlock](https://github.com/DualCoder/vgpu_unlock)
- `Krutav Shah` who wrote the [vGPU_Unlock Wiki](https://docs.google.com/document/d/1pzrWJ9h-zANCtyqRgS7Vzla0Y8Ea2-5z2HEi4X75d2Q/)
- `Wim van 't Hoog` for the [Proxmox All-In-One Installer Script](https://wvthoog.nl/proxmox-vgpu-v3/)
- `mrzenc` who wrote [fastapi-dls-nixos](https://github.com/mrzenc/fastapi-dls-nixos)
And thanks to all people who contributed to all these libraries!

ROADMAP.md (new file)

@@ -0,0 +1,27 @@
# Roadmap
I am planning to implement the following features in the future.
## HA - High Availability
Support failover mode (secondary IP address) as in the official DLS.
**Note**: No load-balancing / round-robin HA mode is supported! If you want that, consider using
Docker Swarm with a shared/clustered database (e.g. postgres).
*See [ha branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ha) for current status.*
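A minimal sketch of that Docker Swarm approach, assuming the public image from the README; hostnames and credentials are placeholders, and this is not a tested setup:

```yaml
version: '3.9'
services:
  dls:
    image: collinwebdesigns/fastapi-dls:latest
    environment:
      TZ: Europe/Berlin
      DLS_URL: dls.example.org  # placeholder, must point to the swarm service
      DLS_PORT: 443
      DATABASE: postgresql://dls:secret@db:5432/dls  # shared database for all replicas
    deploy:
      replicas: 2
  db:
    image: postgres:16
    environment:
      POSTGRES_USER: dls
      POSTGRES_PASSWORD: secret  # placeholder
      POSTGRES_DB: dls
```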
## UI - User Interface
Add a user interface to manage origins and leases.
*See [ui branch](https://git.collinwebdesigns.de/oscar.krause/fastapi-dls/-/tree/ui) for current status.*
## Config Database
Instead of using environment variables and configuration files and manually creating certificates, store configs and
certificates in the database (like origins and leases). Additionally, a startup assistant should be provided to prefill
required attributes and create instance certificates. This is more user-friendly and should improve first setup.


@@ -1,53 +1,89 @@
import logging
import sys
from base64 import b64encode as b64enc
from calendar import timegm
from contextlib import asynccontextmanager
from datetime import datetime, UTC
from hashlib import sha256
from uuid import uuid4
from os.path import join, dirname
from json import loads as json_loads
from os import getenv as env
from os.path import join, dirname
from uuid import uuid4
from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv
from fastapi import FastAPI
from fastapi.requests import Request
from json import loads as json_loads
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from calendar import timegm
from jose import jws, jwk, jwt, JWTError
from jose import jws, jwt, JWTError
from jose.constants import ALGORITHMS
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse, JSONResponse as JSONr, HTMLResponse as HTMLr, Response, RedirectResponse
from util import load_key, load_file
from orm import Origin, Lease, init as db_init, migrate
from orm import Origin, Lease, init as db_init, migrate, Instance, Site
logger = logging.getLogger()
# Load variables
load_dotenv('../version.env')
# Get current timezone
TZ = datetime.now().astimezone().tzinfo
# Load basic variables
VERSION, COMMIT, DEBUG = env('VERSION', 'unknown'), env('COMMIT', 'unknown'), str(env('DEBUG', 'false')).lower() == 'true'
config = dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, **config)
# Database connection
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)
# everything prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service
# Load DLS variables (all prefixed with "INSTANCE_*" is used as "SERVICE_INSTANCE_*" or "SI_*" in official dls service)
DLS_URL = str(env('DLS_URL', 'localhost'))
DLS_PORT = int(env('DLS_PORT', '443'))
SITE_KEY_XID = str(env('SITE_KEY_XID', '00000000-0000-0000-0000-000000000000'))
INSTANCE_REF = str(env('INSTANCE_REF', '10000000-0000-0000-0000-000000000001'))
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001'))
INSTANCE_KEY_RSA = load_key(str(env('INSTANCE_KEY_RSA', join(dirname(__file__), 'cert/instance.private.pem'))))
INSTANCE_KEY_PUB = load_key(str(env('INSTANCE_KEY_PUB', join(dirname(__file__), 'cert/instance.public.pem'))))
TOKEN_EXPIRE_DELTA = relativedelta(days=int(env('TOKEN_EXPIRE_DAYS', 1)), hours=int(env('TOKEN_EXPIRE_HOURS', 0)))
LEASE_EXPIRE_DELTA = relativedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
LEASE_RENEWAL_PERIOD = float(env('LEASE_RENEWAL_PERIOD', 0.15))
LEASE_RENEWAL_DELTA = timedelta(days=int(env('LEASE_EXPIRE_DAYS', 90)), hours=int(env('LEASE_EXPIRE_HOURS', 0)))
CORS_ORIGINS = str(env('CORS_ORIGINS', '')).split(',') if (env('CORS_ORIGINS')) else [f'https://{DLS_URL}']
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
ALLOTMENT_REF = str(env('ALLOTMENT_REF', '20000000-0000-0000-0000-000000000001')) # todo
# Logging
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='[{levelname:^7}] [{module:^15}] {message}', style='{')
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logging.getLogger('util').setLevel(LOG_LEVEL)
logging.getLogger('DriverMatrix').setLevel(LOG_LEVEL)
# FastAPI
@asynccontextmanager
async def lifespan(_: FastAPI):
# on startup
default_instance = Instance.get_default_instance(db)
lease_renewal_period = default_instance.lease_renewal_period
lease_renewal_delta = default_instance.get_lease_renewal_delta()
client_token_expire_delta = default_instance.get_client_token_expire_delta()
logger.info(f'''
Using timezone: {str(TZ)}. Make sure this is correct and match your clients!
Your clients will renew their license every {str(Lease.calculate_renewal(lease_renewal_period, lease_renewal_delta))}.
If the renewal fails, the license is valid for {str(lease_renewal_delta)}.
Your client-token file (.tok) is valid for {str(client_token_expire_delta)}.
''')
logger.info(f'Debug is {"enabled" if DEBUG else "disabled"}.')
validate_settings()
yield
# on shutdown
logger.info('Shutting down ...')
config = dict(openapi_url=None, docs_url=None, redoc_url=None) # dict(openapi_url='/-/openapi.json', docs_url='/-/docs', redoc_url='/-/redoc')
app = FastAPI(title='FastAPI-DLS', description='Minimal Delegated License Service (DLS).', version=VERSION, lifespan=lifespan, **config)
app.debug = DEBUG
app.add_middleware(
@@ -58,15 +94,28 @@ app.add_middleware(
allow_headers=['*'],
)
logger.setLevel(logging.DEBUG if DEBUG else logging.INFO)
def __get_token(request: Request) -> dict:
# Helper
def __get_token(request: Request, jwt_decode_key: "jose.jwk") -> dict:
authorization_header = request.headers.get('authorization')
token = authorization_header.split(' ')[1]
return jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
def validate_settings():
session = sessionmaker(bind=db)()
lease_expire_delta_min, lease_expire_delta_max = 86_400, 7_776_000
for instance in session.query(Instance).all():
lease_expire_delta = instance.lease_expire_delta
if lease_expire_delta < lease_expire_delta_min or lease_expire_delta > lease_expire_delta_max:
logger.warning(f'> [ instance ]: {instance.instance_ref}: "lease_expire_delta" should be between {lease_expire_delta_min} and {lease_expire_delta_max}')
session.close()
# Endpoints
@app.get('/', summary='Index')
async def index():
return RedirectResponse('/-/readme')
@@ -78,32 +127,36 @@ async def _index():
@app.get('/-/health', summary='* Health')
async def _health(request: Request):
async def _health():
return JSONr({'status': 'up'})
@app.get('/-/config', summary='* Config', description='returns environment variables.')
async def _config():
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
return JSONr({
'VERSION': str(VERSION),
'COMMIT': str(COMMIT),
'DEBUG': str(DEBUG),
'DLS_URL': str(DLS_URL),
'DLS_PORT': str(DLS_PORT),
'SITE_KEY_XID': str(SITE_KEY_XID),
'INSTANCE_REF': str(INSTANCE_REF),
'SITE_KEY_XID': str(default_site.site_key),
'INSTANCE_REF': str(default_instance.instance_ref),
'ALLOTMENT_REF': [str(ALLOTMENT_REF)],
'TOKEN_EXPIRE_DELTA': str(TOKEN_EXPIRE_DELTA),
'LEASE_EXPIRE_DELTA': str(LEASE_EXPIRE_DELTA),
'LEASE_RENEWAL_PERIOD': str(LEASE_RENEWAL_PERIOD),
'TOKEN_EXPIRE_DELTA': str(default_instance.get_token_expire_delta()),
'LEASE_EXPIRE_DELTA': str(default_instance.get_lease_expire_delta()),
'LEASE_RENEWAL_PERIOD': str(default_instance.lease_renewal_period),
'CORS_ORIGINS': str(CORS_ORIGINS),
'TZ': str(TZ),
})
@app.get('/-/readme', summary='* Readme')
async def _readme():
from markdown import markdown
content = load_file('../README.md').decode('utf-8')
from util import load_file
content = load_file(join(dirname(__file__), '../README.md')).decode('utf-8')
return HTMLr(markdown(text=content, extensions=['tables', 'fenced_code', 'md_in_html', 'nl2br', 'toc']))
@@ -152,8 +205,7 @@ async def _origins(request: Request, leases: bool = False):
for origin in session.query(Origin).all():
x = origin.serialize()
if leases:
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
x['leases'] = list(map(lambda _: _.serialize(**serialize), Lease.find_by_origin_ref(db, origin.origin_ref)))
x['leases'] = list(map(lambda _: _.serialize(), Lease.find_by_origin_ref(db, origin.origin_ref)))
response.append(x)
session.close()
return JSONr(response)
@@ -170,8 +222,7 @@ async def _leases(request: Request, origin: bool = False):
session = sessionmaker(bind=db)()
response = []
for lease in session.query(Lease).all():
serialize = dict(renewal_period=LEASE_RENEWAL_PERIOD, renewal_delta=LEASE_RENEWAL_DELTA)
x = lease.serialize(**serialize)
x = lease.serialize()
if origin:
lease_origin = session.query(Origin).filter(Origin.origin_ref == lease.origin_ref).first()
if lease_origin is not None:
@@ -181,6 +232,12 @@ async def _leases(request: Request, origin: bool = False):
return JSONr(response)
@app.delete('/-/leases/expired', summary='* Leases')
async def _lease_delete_expired(request: Request):
Lease.delete_expired(db)
return Response(status_code=201)
@app.delete('/-/lease/{lease_ref}', summary='* Lease')
async def _lease_delete(request: Request, lease_ref: str):
if Lease.delete(db, lease_ref) == 1:
@@ -191,8 +248,14 @@ async def _lease_delete(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_core_service_instance/service_instance_token_manager.py
@app.get('/-/client-token', summary='* Client-Token', description='creates a new messenger token for this service instance')
async def _client_token():
cur_time = datetime.utcnow()
exp_time = cur_time + relativedelta(years=12)
cur_time = datetime.now(UTC)
default_instance = Instance.get_default_instance(db)
public_key = default_instance.get_public_key()
# todo: implement request parameter to support different instances
jwt_encode_key = default_instance.get_jwt_encode_key()
exp_time = cur_time + default_instance.get_client_token_expire_delta()
payload = {
"jti": str(uuid4()),
@@ -205,7 +268,7 @@ async def _client_token():
"scope_ref_list": [ALLOTMENT_REF],
"fulfillment_class_ref_list": [],
"service_instance_configuration": {
"nls_service_instance_ref": INSTANCE_REF,
"nls_service_instance_ref": default_instance.instance_ref,
"svc_port_set_list": [
{
"idx": 0,
@@ -217,10 +280,10 @@ async def _client_token():
},
"service_instance_public_key_configuration": {
"service_instance_public_key_me": {
"mod": hex(INSTANCE_KEY_PUB.public_key().n)[2:],
"exp": int(INSTANCE_KEY_PUB.public_key().e),
"mod": hex(public_key.raw().public_numbers().n)[2:],
"exp": int(public_key.raw().public_numbers().e),
},
"service_instance_public_key_pem": INSTANCE_KEY_PUB.export_key().decode('utf-8'),
"service_instance_public_key_pem": public_key.pem().decode('utf-8'),
"key_retention_mode": "LATEST_ONLY"
},
}
@@ -237,10 +300,10 @@ async def _client_token():
# venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
@app.post('/auth/v1/origin', description='find or create an origin')
async def auth_v1_origin(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
origin_ref = j.get('candidate_origin_ref')
logging.info(f'> [ origin ]: {origin_ref}: {j}')
logger.info(f'> [ origin ]: {origin_ref}: {j}')
data = Origin(
origin_ref=origin_ref,
@@ -267,10 +330,10 @@ async def auth_v1_origin(request: Request):
# venv/lib/python3.9/site-packages/nls_services_auth/test/test_origins_controller.py
@app.post('/auth/v1/origin/update', description='update an origin evidence')
async def auth_v1_origin_update(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
origin_ref = j.get('origin_ref')
logging.info(f'> [ update ]: {origin_ref}: {j}')
logger.info(f'> [ update ]: {origin_ref}: {j}')
data = Origin(
origin_ref=origin_ref,
@@ -294,21 +357,24 @@ async def auth_v1_origin_update(request: Request):
# venv/lib/python3.9/site-packages/nls_core_auth/auth.py - CodeResponse
@app.post('/auth/v1/code', description='get an authorization code')
async def auth_v1_code(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
origin_ref = j.get('origin_ref')
logging.info(f'> [ code ]: {origin_ref}: {j}')
logger.info(f'> [ code ]: {origin_ref}: {j}')
delta = relativedelta(minutes=15)
expires = cur_time + delta
default_site = Site.get_default_site(db)
jwt_encode_key = Instance.get_default_instance(db).get_jwt_encode_key()
payload = {
'iat': timegm(cur_time.timetuple()),
'exp': timegm(expires.timetuple()),
'challenge': j.get('code_challenge'),
'origin_ref': j.get('origin_ref'),
'key_ref': SITE_KEY_XID,
'kid': SITE_KEY_XID
'key_ref': default_site.site_key,
'kid': default_site.site_key,
}
auth_code = jws.sign(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -326,22 +392,25 @@ async def auth_v1_code(request: Request):
# venv/lib/python3.9/site-packages/nls_core_auth/auth.py - TokenResponse
@app.post('/auth/v1/token', description='exchange auth code and verifier for token')
async def auth_v1_token(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
jwt_encode_key, jwt_decode_key = default_instance.get_jwt_encode_key(), default_instance.get_jwt_decode_key()
try:
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key)
payload = jwt.decode(token=j.get('auth_code'), key=jwt_decode_key, algorithms=ALGORITHMS.RS256)
except JWTError as e:
return JSONr(status_code=400, content={'status': 400, 'title': 'invalid token', 'detail': str(e)})
origin_ref = payload.get('origin_ref')
logging.info(f'> [ auth ]: {origin_ref}: {j}')
logger.info(f'> [ auth ]: {origin_ref}: {j}')
# validate the code challenge
challenge = b64enc(sha256(j.get('code_verifier').encode('utf-8')).digest()).rstrip(b'=').decode('utf-8')
if payload.get('challenge') != challenge:
return JSONr(status_code=401, content={'status': 401, 'detail': 'expected challenge did not match verifier'})
access_expires_on = cur_time + TOKEN_EXPIRE_DELTA
access_expires_on = cur_time + default_instance.get_token_expire_delta()
new_payload = {
'iat': timegm(cur_time.timetuple()),
@@ -350,8 +419,8 @@ async def auth_v1_token(request: Request):
'aud': 'https://cls.nvidia.org',
'exp': timegm(access_expires_on.timetuple()),
'origin_ref': origin_ref,
'key_ref': SITE_KEY_XID,
'kid': SITE_KEY_XID,
'key_ref': default_site.site_key,
'kid': default_site.site_key,
}
auth_token = jwt.encode(new_payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256)
@@ -368,16 +437,19 @@ async def auth_v1_token(request: Request):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.post('/leasing/v1/lessor', description='request multiple leases (borrow) for current origin')
async def leasing_v1_lessor(request: Request):
j, token, cur_time = json_loads((await request.body()).decode('utf-8')), __get_token(request), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
default_instance = Instance.get_default_instance(db)
jwt_decode_key = default_instance.get_jwt_decode_key()
try:
token = __get_token(request)
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref')
scope_ref_list = j.get('scope_ref_list')
logging.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
logger.info(f'> [ create ]: {origin_ref}: create leases for scope_ref_list {scope_ref_list}')
lease_result_list = []
for scope_ref in scope_ref_list:
@@ -385,7 +457,7 @@ async def leasing_v1_lessor(request: Request):
# return JSONr(status_code=500, detail=f'no service instances found for scopes: ["{scope_ref}"]')
lease_ref = str(uuid4())
expires = cur_time + LEASE_EXPIRE_DELTA
expires = cur_time + default_instance.get_lease_expire_delta()
lease_result_list.append({
"ordinal": 0,
# https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html
@@ -393,13 +465,13 @@ async def leasing_v1_lessor(request: Request):
"ref": lease_ref,
"created": cur_time.isoformat(),
"expires": expires.isoformat(),
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"recommended_lease_renewal": default_instance.lease_renewal_period,
"offline_lease": "true",
"license_type": "CONCURRENT_COUNTED_SINGLE"
}
})
data = Lease(origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
data = Lease(instance_ref=default_instance.instance_ref, origin_ref=origin_ref, lease_ref=lease_ref, lease_created=cur_time, lease_expires=expires)
Lease.create_or_update(db, data)
response = {
@@ -416,12 +488,19 @@ async def leasing_v1_lessor(request: Request):
# venv/lib/python3.9/site-packages/nls_dal_service_instance_dls/schema/service_instance/V1_0_21__product_mapping.sql
@app.get('/leasing/v1/lessor/leases', description='get active leases for current origin')
async def leasing_v1_lessor_lease(request: Request):
token, cur_time = __get_token(request), datetime.utcnow()
cur_time = datetime.now(UTC)
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref')
active_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
logging.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
logger.info(f'> [ leases ]: {origin_ref}: found {len(active_lease_list)} active leases')
response = {
"active_lease_list": active_lease_list,
@@ -436,20 +515,28 @@ async def leasing_v1_lessor_lease(request: Request):
# venv/lib/python3.9/site-packages/nls_core_lease/lease_single.py
@app.put('/leasing/v1/lease/{lease_ref}', description='renew a lease')
async def leasing_v1_lease_renew(request: Request, lease_ref: str):
token, cur_time = __get_token(request), datetime.utcnow()
cur_time = datetime.now(UTC)
default_instance = Instance.get_default_instance(db)
jwt_decode_key = default_instance.get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref')
logging.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
logger.info(f'> [ renew ]: {origin_ref}: renew {lease_ref}')
entity = Lease.find_by_origin_ref_and_lease_ref(db, origin_ref, lease_ref)
if entity is None:
return JSONr(status_code=404, content={'status': 404, 'detail': 'requested lease not available'})
expires = cur_time + LEASE_EXPIRE_DELTA
expires = cur_time + default_instance.get_lease_expire_delta()
response = {
"lease_ref": lease_ref,
"expires": expires.isoformat(),
"recommended_lease_renewal": LEASE_RENEWAL_PERIOD,
"recommended_lease_renewal": default_instance.lease_renewal_period,
"offline_lease": True,
"prompts": None,
"sync_timestamp": cur_time.isoformat(),
@@ -463,10 +550,17 @@ async def leasing_v1_lease_renew(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_single_controller.py
@app.delete('/leasing/v1/lease/{lease_ref}', description='release (return) a lease')
async def leasing_v1_lease_delete(request: Request, lease_ref: str):
token, cur_time = __get_token(request), datetime.utcnow()
cur_time = datetime.now(UTC)
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref')
logging.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
logger.info(f'> [ return ]: {origin_ref}: return {lease_ref}')
entity = Lease.find_by_lease_ref(db, lease_ref)
if entity.origin_ref != origin_ref:
@@ -489,13 +583,20 @@ async def leasing_v1_lease_delete(request: Request, lease_ref: str):
# venv/lib/python3.9/site-packages/nls_services_lease/test/test_lease_multi_controller.py
@app.delete('/leasing/v1/lessor/leases', description='release all leases')
async def leasing_v1_lessor_lease_remove(request: Request):
token, cur_time = __get_token(request), datetime.utcnow()
cur_time = datetime.now(UTC)
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
try:
token = __get_token(request, jwt_decode_key)
except JWTError:
return JSONr(status_code=401, content={'status': 401, 'detail': 'token is not valid'})
origin_ref = token.get('origin_ref')
released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
deletions = Lease.cleanup(db, origin_ref)
logging.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')
logger.info(f'> [ remove ]: {origin_ref}: removed {deletions} leases')
response = {
"released_lease_list": released_lease_list,
@@ -509,7 +610,9 @@ async def leasing_v1_lessor_lease_remove(request: Request):
@app.post('/leasing/v1/lessor/shutdown', description='shutdown all leases')
async def leasing_v1_lessor_shutdown(request: Request):
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.utcnow()
j, cur_time = json_loads((await request.body()).decode('utf-8')), datetime.now(UTC)
jwt_decode_key = Instance.get_default_instance(db).get_jwt_decode_key()
token = j.get('token')
token = jwt.decode(token=token, key=jwt_decode_key, algorithms=ALGORITHMS.RS256, options={'verify_aud': False})
@@ -517,7 +620,7 @@ async def leasing_v1_lessor_shutdown(request: Request):
released_lease_list = list(map(lambda x: x.lease_ref, Lease.find_by_origin_ref(db, origin_ref)))
deletions = Lease.cleanup(db, origin_ref)
logging.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')
logger.info(f'> [ shutdown ]: {origin_ref}: removed {deletions} leases')
response = {
"released_lease_list": released_lease_list,
@@ -540,7 +643,7 @@ if __name__ == '__main__':
#
###
logging.info(f'> Starting dev-server ...')
logger.info(f'> Starting dev-server ...')
ssl_keyfile = join(dirname(__file__), 'cert/webserver.key')
ssl_certfile = join(dirname(__file__), 'cert/webserver.crt')


@@ -1,19 +1,143 @@
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import logging
from datetime import datetime, timedelta, timezone, UTC
from os import getenv as env
from os.path import join, dirname, isfile
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect
from sqlalchemy.ext.declarative import declarative_base
from dateutil.relativedelta import relativedelta
from jose import jwk
from jose.constants import ALGORITHMS
from sqlalchemy import Column, VARCHAR, CHAR, ForeignKey, DATETIME, update, and_, inspect, text, BLOB, INT, FLOAT
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import sessionmaker, declarative_base, Session, relationship
from sqlalchemy.schema import CreateTable
from util import DriverMatrix, PrivateKey, PublicKey
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
Base = declarative_base()
class Site(Base):
__tablename__ = "site"
INITIAL_SITE_KEY_XID = '10000000-0000-0000-0000-000000000000'
INITIAL_SITE_NAME = 'default-site'
site_key = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4, SITE_KEY_XID
name = Column(VARCHAR(length=256), nullable=False)
def __str__(self):
return f'SITE_KEY_XID: {self.site_key}'
@staticmethod
def create_statement(engine: Engine):
return CreateTable(Site.__table__).compile(engine)
@staticmethod
def get_default_site(engine: Engine) -> "Site":
session = sessionmaker(bind=engine)()
entity = session.query(Site).filter(Site.site_key == Site.INITIAL_SITE_KEY_XID).first()
session.close()
return entity
class Instance(Base):
__tablename__ = "instance"
DEFAULT_INSTANCE_REF = '10000000-0000-0000-0000-000000000001'
DEFAULT_TOKEN_EXPIRE_DELTA = 86_400 # 1 day
DEFAULT_LEASE_EXPIRE_DELTA = 7_776_000 # 90 days
DEFAULT_LEASE_RENEWAL_PERIOD = 0.15
DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA = 378_432_000 # 12 years
# 1 day = 86400 (min. in production setup, max 90 days), 1 hour = 3600
instance_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4, INSTANCE_REF
site_key = Column(CHAR(length=36), ForeignKey(Site.site_key, ondelete='CASCADE'), nullable=False, index=True) # uuid4
private_key = Column(BLOB(length=2048), nullable=False)
public_key = Column(BLOB(length=512), nullable=False)
token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_TOKEN_EXPIRE_DELTA, comment='in seconds')
lease_expire_delta = Column(INT(), nullable=False, default=DEFAULT_LEASE_EXPIRE_DELTA, comment='in seconds')
lease_renewal_period = Column(FLOAT(precision=2), nullable=False, default=DEFAULT_LEASE_RENEWAL_PERIOD)
client_token_expire_delta = Column(INT(), nullable=False, default=DEFAULT_CLIENT_TOKEN_EXPIRE_DELTA, comment='in seconds')
__origin = relationship(Site, foreign_keys=[site_key])
def __str__(self):
return f'INSTANCE_REF: {self.instance_ref} (SITE_KEY_XID: {self.site_key})'
@staticmethod
def create_statement(engine: Engine):
return CreateTable(Instance.__table__).compile(engine)
@staticmethod
def create_or_update(engine: Engine, instance: "Instance"):
session = sessionmaker(bind=engine)()
entity = session.query(Instance).filter(Instance.instance_ref == instance.instance_ref).first()
if entity is None:
session.add(instance)
else:
x = dict(
site_key=instance.site_key,
private_key=instance.private_key,
public_key=instance.public_key,
token_expire_delta=instance.token_expire_delta,
lease_expire_delta=instance.lease_expire_delta,
lease_renewal_period=instance.lease_renewal_period,
client_token_expire_delta=instance.client_token_expire_delta,
)
session.execute(update(Instance).where(Instance.instance_ref == instance.instance_ref).values(**x))
session.commit()
session.flush()
session.close()
# todo: validate on startup that "lease_expire_delta" is between 1 day and 90 days
@staticmethod
def get_default_instance(engine: Engine) -> "Instance":
session = sessionmaker(bind=engine)()
site = Site.get_default_site(engine)
entity = session.query(Instance).filter(Instance.site_key == site.site_key).first()
session.close()
return entity
def get_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.token_expire_delta)
def get_lease_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.lease_expire_delta)
def get_lease_renewal_delta(self) -> "datetime.timedelta":
return timedelta(seconds=self.lease_expire_delta)
def get_client_token_expire_delta(self) -> "dateutil.relativedelta.relativedelta":
return relativedelta(seconds=self.client_token_expire_delta)
def __get_private_key(self) -> "PrivateKey":
return PrivateKey(self.private_key)
def get_public_key(self) -> "PublicKey":
return PublicKey(self.public_key)
def get_jwt_encode_key(self) -> "jose.jwk":
return jwk.construct(self.__get_private_key().pem().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def get_jwt_decode_key(self) -> "jose.jwk":
return jwk.construct(self.get_public_key().pem().decode('utf-8'), algorithm=ALGORITHMS.RS256)
def get_private_key_str(self, encoding: str = 'utf-8') -> str:
return self.private_key.decode(encoding)
def get_public_key_str(self, encoding: str = 'utf-8') -> str:
return self.public_key.decode(encoding)
class Origin(Base):
__tablename__ = "origin"
origin_ref = Column(CHAR(length=36), primary_key=True, unique=True, index=True) # uuid4
# service_instance_xid = Column(CHAR(length=36), nullable=False, index=True) # uuid4 # not necessary, we only support one service_instance_xid ('INSTANCE_REF')
hostname = Column(VARCHAR(length=256), nullable=True)
guest_driver_version = Column(VARCHAR(length=10), nullable=True)
@@ -24,6 +148,8 @@ class Origin(Base):
return f'Origin(origin_ref={self.origin_ref}, hostname={self.hostname})'
def serialize(self) -> dict:
_ = DriverMatrix().find(self.guest_driver_version)
return {
'origin_ref': self.origin_ref,
# 'service_instance_xid': self.service_instance_xid,
@@ -31,11 +157,11 @@ class Origin(Base):
'guest_driver_version': self.guest_driver_version,
'os_platform': self.os_platform,
'os_version': self.os_version,
'$driver': _,  # already None if no matching driver entry was found
}
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
return CreateTable(Origin.__table__).compile(engine)
@staticmethod
@@ -57,12 +183,22 @@ class Origin(Base):
session.close()
@staticmethod
def delete(engine: Engine, origins: ["Origin"] = None) -> int:
def delete(engine: Engine, origin_refs: [str] = None) -> int:
session = sessionmaker(bind=engine)()
if origins is None:
if origin_refs is None:
deletions = session.query(Origin).delete()
else:
deletions = session.query(Origin).filter(Origin.origin_ref in origins).delete()
deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
session.commit()
session.close()
return deletions
@staticmethod
def delete_expired(engine: Engine) -> int:
session = sessionmaker(bind=engine)()
origins = session.query(Origin).join(Lease, Origin.origin_ref == Lease.origin_ref, isouter=True).filter(Lease.lease_ref.is_(None)).all()
origin_refs = [origin.origin_ref for origin in origins]
deletions = session.query(Origin).filter(Origin.origin_ref.in_(origin_refs)).delete()
session.commit()
session.close()
return deletions
@@ -71,18 +207,24 @@ class Origin(Base):
class Lease(Base):
__tablename__ = "lease"
instance_ref = Column(CHAR(length=36), ForeignKey(Instance.instance_ref, ondelete='CASCADE'), nullable=False, index=True) # uuid4
lease_ref = Column(CHAR(length=36), primary_key=True, nullable=False, index=True) # uuid4
origin_ref = Column(CHAR(length=36), ForeignKey(Origin.origin_ref, ondelete='CASCADE'), nullable=False, index=True) # uuid4
# scope_ref = Column(CHAR(length=36), nullable=False, index=True) # uuid4 # not necessary, we only support one scope_ref ('ALLOTMENT_REF')
lease_created = Column(DATETIME(), nullable=False)
lease_expires = Column(DATETIME(), nullable=False)
lease_updated = Column(DATETIME(), nullable=False)
__instance = relationship(Instance, foreign_keys=[instance_ref])
__origin = relationship(Origin, foreign_keys=[origin_ref])
def __repr__(self):
return f'Lease(origin_ref={self.origin_ref}, lease_ref={self.lease_ref}, expires={self.lease_expires})'
def serialize(self, renewal_period: float, renewal_delta: timedelta) -> dict:
def serialize(self) -> dict:
renewal_period = self.__instance.lease_renewal_period
renewal_delta = self.__instance.get_lease_renewal_delta()
lease_renewal = int(Lease.calculate_renewal(renewal_period, renewal_delta).total_seconds())
lease_renewal = self.lease_updated + relativedelta(seconds=lease_renewal)
@@ -90,15 +232,14 @@ class Lease(Base):
'lease_ref': self.lease_ref,
'origin_ref': self.origin_ref,
# 'scope_ref': self.scope_ref,
'lease_created': self.lease_created.isoformat(),
'lease_expires': self.lease_expires.isoformat(),
'lease_updated': self.lease_updated.isoformat(),
'lease_renewal': lease_renewal.isoformat(),
'lease_created': self.lease_created.replace(tzinfo=timezone.utc).isoformat(),
'lease_expires': self.lease_expires.replace(tzinfo=timezone.utc).isoformat(),
'lease_updated': self.lease_updated.replace(tzinfo=timezone.utc).isoformat(),
'lease_renewal': lease_renewal.replace(tzinfo=timezone.utc).isoformat(),
}
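# note (sketch): the DB columns store naive datetimes; replace(tzinfo=timezone.utc) only
# tags them as UTC for ISO-8601 output, it does not convert them - it assumes the rows
# were written with UTC timestamps (cf. datetime.now(UTC) in delete_expired below)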
@staticmethod
def create_statement(engine: Engine):
from sqlalchemy.schema import CreateTable
return CreateTable(Lease.__table__).compile(engine)
@staticmethod
@@ -161,6 +302,14 @@ class Lease(Base):
session.close()
return deletions
@staticmethod
def delete_expired(engine: Engine) -> int:
session = sessionmaker(bind=engine)()
deletions = session.query(Lease).filter(Lease.lease_expires <= datetime.now(UTC)).delete()
session.commit()
session.close()
return deletions
@staticmethod
def calculate_renewal(renewal_period: float, delta: timedelta) -> timedelta:
"""
@@ -170,44 +319,118 @@ class Lease(Base):
import datetime
LEASE_RENEWAL_PERIOD=0.2 # 20%
delta = datetime.timedelta(days=1)
renew = delta.total_seconds() * LEASE_RENEWAL_PERIOD
renew = datetime.timedelta(seconds=renew)
expires = delta - renew # 19.2 hours
import datetime
LEASE_RENEWAL_PERIOD=0.15 # 15%
delta = datetime.timedelta(days=90)
renew = delta.total_seconds() * LEASE_RENEWAL_PERIOD
renew = datetime.timedelta(seconds=renew)
expires = delta - renew # 76 days, 12:00:00 hours
"""
renew = delta.total_seconds() * renewal_period
renew = timedelta(seconds=renew)
return renew
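# worked example (sketch): with the values from the docstring above, a 90-day lease and
# renewal_period=0.15 give a renewal window of 13.5 days:
#   Lease.calculate_renewal(0.15, timedelta(days=90)) # -> timedelta(days=13, seconds=43200)
# serialize() then reports lease_renewal = lease_updated + 13.5 days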
def init_default_site(session: Session):
private_key = PrivateKey.generate()
public_key = private_key.public_key()
site = Site(
site_key=Site.INITIAL_SITE_KEY_XID,
name=Site.INITIAL_SITE_NAME
)
session.add(site)
session.commit()
instance = Instance(
instance_ref=Instance.DEFAULT_INSTANCE_REF,
site_key=site.site_key,
private_key=private_key.pem(),
public_key=public_key.pem(),
)
session.add(instance)
session.commit()
def init(engine: Engine):
tables = [Origin, Lease]
tables = [Site, Instance, Origin, Lease]
db = inspect(engine)
session = sessionmaker(bind=engine)()
for table in tables:
if not db.dialect.has_table(engine.connect(), table.__tablename__):
session.execute(str(table.create_statement(engine)))
exists = db.dialect.has_table(engine.connect(), table.__tablename__)
logger.info(f'> Table "{table.__tablename__:<16}" exists: {exists}')
if not exists:
session.execute(text(str(table.create_statement(engine))))
session.commit()
# create default site
cnt = session.query(Site).count()
if cnt == 0:
init_default_site(session)
session.flush()
session.close()
def migrate(engine: Engine):
db = inspect(engine)
def upgrade_1_0_to_1_1():
x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
x = next(_ for _ in x if _['name'] == 'origin_ref')
if x['primary_key'] > 0:
print('Found old database schema with "origin_ref" as primary-key in "lease" table. Dropping table!')
print(' Your leases will be recreated on the next renewal!')
print(' If an error message appears on the client, you can ignore it.')
Lease.__table__.drop(bind=engine)
init(engine)
# todo: add update guide to use 1.LATEST to 2.0
def upgrade_1_x_to_2_0():
site = Site.get_default_site(engine)
logger.info(site)
instance = Instance.get_default_instance(engine)
logger.info(instance)
# def upgrade_1_2_to_1_3():
# x = db.dialect.get_columns(engine.connect(), Lease.__tablename__)
# x = next((_ for _ in x if _['name'] == 'scope_ref'), None)
# if x is None:
# Lease.scope_ref.compile()
# column_name = Lease.scope_ref.name
# column_type = Lease.scope_ref.type.compile(engine.dialect)
# engine.execute(f'ALTER TABLE "{Lease.__tablename__}" ADD COLUMN "{column_name}" {column_type}')
# SITE_KEY_XID
if (site_key := env('SITE_KEY_XID', None)) is not None:
site.site_key = str(site_key)
upgrade_1_0_to_1_1()
# upgrade_1_2_to_1_3()
# INSTANCE_REF
if (instance_ref := env('INSTANCE_REF', None)) is not None:
instance.instance_ref = str(instance_ref)
# ALLOTMENT_REF
if (allotment_ref := env('ALLOTMENT_REF', None)) is not None:
pass # todo
# INSTANCE_KEY_RSA, INSTANCE_KEY_PUB
default_instance_private_key_path = str(join(dirname(__file__), 'cert/instance.private.pem'))
instance_private_key = env('INSTANCE_KEY_RSA', None)
if instance_private_key is not None:
instance.private_key = PrivateKey(instance_private_key.encode('utf-8'))
elif isfile(default_instance_private_key_path):
instance.private_key = PrivateKey.from_file(default_instance_private_key_path)
default_instance_public_key_path = str(join(dirname(__file__), 'cert/instance.public.pem'))
instance_public_key = env('INSTANCE_KEY_PUB', None)
if instance_public_key is not None:
instance.public_key = PublicKey(instance_public_key.encode('utf-8'))
elif isfile(default_instance_public_key_path):
instance.public_key = PublicKey.from_file(default_instance_public_key_path)
# TOKEN_EXPIRE_DELTA
token_expire_delta = env('TOKEN_EXPIRE_DAYS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = int(token_expire_delta) * 86_400  # env() returns strings
token_expire_delta = env('TOKEN_EXPIRE_HOURS', None)
if token_expire_delta not in (None, 0):
instance.token_expire_delta = int(token_expire_delta) * 3_600
# LEASE_EXPIRE_DELTA, LEASE_RENEWAL_DELTA
lease_expire_delta = env('LEASE_EXPIRE_DAYS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = int(lease_expire_delta) * 86_400
lease_expire_delta = env('LEASE_EXPIRE_HOURS', None)
if lease_expire_delta not in (None, 0):
instance.lease_expire_delta = int(lease_expire_delta) * 3_600
# LEASE_RENEWAL_PERIOD
lease_renewal_period = env('LEASE_RENEWAL_PERIOD', None)
if lease_renewal_period is not None:
instance.lease_renewal_period = float(lease_renewal_period)
# todo: update site, instance
upgrade_1_x_to_2_0()


@@ -1,28 +1,132 @@
def load_file(filename) -> bytes:
import logging
from json import load as json_load
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey, generate_private_key
from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key
logging.basicConfig()
def load_file(filename: str) -> bytes:
log = logging.getLogger(__name__)
log.debug(f'Loading contents of file "{filename}"')
with open(filename, 'rb') as file:
content = file.read()
return content
def load_key(filename) -> "RsaKey":
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
from Crypto.PublicKey.RSA import RsaKey
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
class PrivateKey:
return RSA.import_key(extern_key=load_file(filename), passphrase=None)
def __init__(self, data: bytes):
self.__key = load_pem_private_key(data, password=None)
@staticmethod
def from_file(filename: str) -> "PrivateKey":
log = logging.getLogger(__name__)
log.debug(f'Importing RSA-Private-Key from "{filename}"')
with open(filename, 'rb') as f:
data = f.read()
return PrivateKey(data=data.strip())
def raw(self) -> RSAPrivateKey:
return self.__key
def pem(self) -> bytes:
return self.__key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
def public_key(self) -> "PublicKey":
data = self.__key.public_key().public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
return PublicKey(data=data)
@staticmethod
def generate(public_exponent: int = 65537, key_size: int = 2048) -> "PrivateKey":
log = logging.getLogger(__name__)
log.debug('Generating RSA-Key')
key = generate_private_key(public_exponent=public_exponent, key_size=key_size)
data = key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
return PrivateKey(data=data)
def generate_key() -> "RsaKey":
try:
# Crypto | Cryptodome on Debian
from Crypto.PublicKey import RSA
from Crypto.PublicKey.RSA import RsaKey
except ModuleNotFoundError:
from Cryptodome.PublicKey import RSA
from Cryptodome.PublicKey.RSA import RsaKey
class PublicKey:
return RSA.generate(bits=2048)
def __init__(self, data: bytes):
self.__key = load_pem_public_key(data)
@staticmethod
def from_file(filename: str) -> "PublicKey":
log = logging.getLogger(__name__)
log.debug(f'Importing RSA-Public-Key from "{filename}"')
with open(filename, 'rb') as f:
data = f.read()
return PublicKey(data=data.strip())
def raw(self) -> RSAPublicKey:
return self.__key
def pem(self) -> bytes:
return self.__key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
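# round-trip sketch (illustrative): both classes parse the PEM they emit, so keys
# survive being stored as bytes in the "instance" table:
#   key = PrivateKey.generate() # 2048-bit RSA by default
#   assert PrivateKey(key.pem()).pem() == key.pem()
#   assert PublicKey(key.public_key().pem()).pem() == key.public_key().pem()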
class DriverMatrix:
__DRIVER_MATRIX_FILENAME = 'static/driver_matrix.json'
__DRIVER_MATRIX: None | dict = None # https://docs.nvidia.com/grid/ => "Driver Versions"
def __init__(self):
self.log = logging.getLogger(self.__class__.__name__)
if DriverMatrix.__DRIVER_MATRIX is None:
self.__load()
def __load(self):
try:
with open(DriverMatrix.__DRIVER_MATRIX_FILENAME) as file:
DriverMatrix.__DRIVER_MATRIX = json_load(file)
self.log.debug(f'Successfully loaded "{DriverMatrix.__DRIVER_MATRIX_FILENAME}".')
except Exception as e:
DriverMatrix.__DRIVER_MATRIX = {}  # cache an empty dict so the file is not reopened on every request, only after an app restart
# self.log.warning(f'Failed to load "{DriverMatrix.__DRIVER_MATRIX_FILENAME}": {e}')
@staticmethod
def find(version: str) -> dict | None:
if DriverMatrix.__DRIVER_MATRIX is None:
return None
for idx, (key, branch) in enumerate(DriverMatrix.__DRIVER_MATRIX.items()):
for release in branch.get('$releases'):
linux_driver = release.get('Linux Driver')
windows_driver = release.get('Windows Driver')
if version == linux_driver or version == windows_driver:
tmp = branch.copy()
tmp.pop('$releases')
is_latest = release.get('vGPU Software') == branch.get('Latest Release in Branch')
return {
'software_branch': branch.get('vGPU Software Branch'),
'branch_version': release.get('vGPU Software'),
'driver_branch': branch.get('Driver Branch'),
'branch_status': branch.get('vGPU Branch Status'),
'release_date': release.get('Release Date'),
'eol': branch.get('EOL Date') if is_latest else None,
'is_latest': is_latest,
}
return None
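# usage sketch (hypothetical version string): find() maps a guest driver version to its
# vGPU software branch, or returns None when the version is unknown:
#   DriverMatrix().find('550.90.07') # -> {'software_branch': ..., 'branch_version': ..., 'is_latest': ...} or None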


@@ -1,26 +0,0 @@
# Database structure
## `request_routing.service_instance`
| xid | org_name |
|----------------------------------------|--------------------------|
| `10000000-0000-0000-0000-000000000000` | `lic-000000000000000000` |
- `xid` is used as `SERVICE_INSTANCE_XID`
## `request_routing.license_allotment_service_instance`
| xid | service_instance_xid | license_allotment_xid |
|----------------------------------------|----------------------------------------|----------------------------------------|
| `90000000-0000-0000-0000-000000000001` | `10000000-0000-0000-0000-000000000000` | `80000000-0000-0000-0000-000000000001` |
- `xid` is only a primary-key and never used as foreign-key or reference
- `license_allotment_xid` must be used to fetch `xid`'s from `request_routing.license_allotment_reference`
## `request_routing.license_allotment_reference`
| xid | license_allotment_xid |
|----------------------------------------|----------------------------------------|
| `20000000-0000-0000-0000-000000000001` | `80000000-0000-0000-0000-000000000001` |
- `xid` is used as `scope_ref_list` on token request
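Putting the three tables together: resolving the `scope_ref_list` for a token request walks the chain `service_instance` → `license_allotment_service_instance` → `license_allotment_reference`. A minimal sketch with the sample rows above (plain Python, illustrative only):
```python
# resolve scope_ref_list for a SERVICE_INSTANCE_XID using the sample rows above
service_instance_xid = '10000000-0000-0000-0000-000000000000'

license_allotment_service_instance = [
    {'service_instance_xid': '10000000-0000-0000-0000-000000000000',
     'license_allotment_xid': '80000000-0000-0000-0000-000000000001'},
]
license_allotment_reference = [
    {'xid': '20000000-0000-0000-0000-000000000001',
     'license_allotment_xid': '80000000-0000-0000-0000-000000000001'},
]

allotments = {row['license_allotment_xid'] for row in license_allotment_service_instance
              if row['service_instance_xid'] == service_instance_xid}
scope_ref_list = [row['xid'] for row in license_allotment_reference
                  if row['license_allotment_xid'] in allotments]
print(scope_ref_list)  # ['20000000-0000-0000-0000-000000000001']
```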


@@ -1,177 +0,0 @@
# Reverse Engineering Notes
# Useful commands
## Check licensing status
- `nvidia-smi -q | grep "License"`
**Output**
```
vGPU Software Licensed Product
License Status : Licensed (Expiry: 2023-1-14 12:59:52 GMT)
```
## Track licensing progress
- NVIDIA Grid Log: `journalctl -u nvidia-gridd -f`
```
systemd[1]: Started NVIDIA Grid Daemon.
nvidia-gridd[2986]: Configuration parameter ( ServerAddress ) not set
nvidia-gridd[2986]: vGPU Software package (0)
nvidia-gridd[2986]: Ignore service provider and node-locked licensing
nvidia-gridd[2986]: NLS initialized
nvidia-gridd[2986]: Acquiring license. (Info: license.nvidia.space; NVIDIA RTX Virtual Workstation)
nvidia-gridd[2986]: License acquired successfully. (Info: license.nvidia.space, NVIDIA RTX Virtual Workstation; Expiry: 2023-1-29 22:3:0 GMT)
```
# DLS-Container File-System (Docker)
## Configuration data
Most variables and configs are stored in `/var/lib/docker/volumes/configurations/_data`.
Files can be modified with `docker cp <container-id>:/venv/... /opt/localfile/...` and copied back the same way.
(You may need to fix permissions with `docker exec -u 0 <container-id> chown nonroot:nonroot /venv/...`.)
## Dive / Docker image inspector
- `dive dls:appliance`
The source code is stored in `/venv/lib/python3.9/site-packages/nls_*`.
Image-Reference:
```
Tags: (unavailable)
Id: d1c7976a5d2b3681ff6c5a30f8187e4015187a83f3f285ba4a37a45458bd6b98
Digest: sha256:311223c5af7a298ec1104f5dc8c3019bfb0e1f77256dc3d995244ffb295a971f
Command:
#(nop) ADD file:c1900d3e3a29c29a743a8da86c437006ec5d2aa873fb24e48033b6bf492bb37b in /
```
## Private Key (Site-Key)
- `/etc/dls/config/decryptor/decryptor`
```shell
docker exec -it <container-id> /etc/dls/config/decryptor/decryptor > /tmp/private-key.pem
```
```
-----BEGIN RSA PRIVATE KEY-----
...
-----END RSA PRIVATE KEY-----
```
## Site Key Uri - `/etc/dls/config/site_key_uri.bin`
```
base64-content...
```
## DB Password - `/etc/dls/config/dls_db_password.bin`
```
base64-content...
```
**Decrypt database password**
```
cd /var/lib/docker/volumes/configurations/_data
cat dls_db_password.bin | base64 -d > dls_db_password.bin.raw
openssl rsautl -decrypt -inkey /tmp/private-key.pem -in dls_db_password.bin.raw
```
# Database
- Manipulating the licenses in the database is sufficient; not a single line of code has to be changed to bypass the licensing validations.
# Logging / Stack Trace
- https://docs.nvidia.com/license-system/latest/nvidia-license-system-user-guide/index.html#troubleshooting-dls-instance
**Failed licensing log**
```
{
"activity": 100,
"context": {
"SERVICE_INSTANCE_ID": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38",
"SERVICE_INSTANCE_NAME": "DEFAULT_2022-12-14_12:48:30",
"description": "borrow failed: NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)",
"event_type": null,
"function_name": "_evt",
"lineno": 54,
"module_name": "nls_dal_lease_dls.event",
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24",
"origin_ref": "3f7f5a50-a26b-425b-8d5e-157f63e72b1c",
"service_name": "nls_services_lease"
},
"detail": {
"oc": {
"license_allotment_xid": "10c4317f-7c4c-11ed-a524-0e4252a7e5f1",
"origin_ref": "3f7f5a50-a26b-425b-8d5e-157f63e72b1c",
"service_instance_xid": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38"
},
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24"
},
"id": "0cc9e092-3b92-4652-8d9e-7622ef85dc79",
"metadata": {},
"ts": "2022-12-15T10:25:36.827661Z"
}
{
"activity": 400,
"context": {
"SERVICE_INSTANCE_ID": "b43d6e46-d6d0-4943-8b8d-c66a5f6e0d38",
"SERVICE_INSTANCE_NAME": "DEFAULT_2022-12-14_12:48:30",
"description": "lease_multi_create failed: no pool features found for: NVIDIA RTX Virtual Workstation",
"event_by": "system",
"function_name": "lease_multi_create",
"level": "warning",
"lineno": 157,
"module_name": "nls_services_lease.controllers.lease_multi_controller",
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24",
"service_name": "nls_services_lease"
},
"detail": {
"_msg": "lease_multi_create failed: no pool features found for: NVIDIA RTX Virtual Workstation",
"exec_info": ["NotFoundError", "NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)", " File \"/venv/lib/python3.9/site-packages/nls_services_lease/controllers/lease_multi_controller.py\", line 127, in lease_multi_create\n data = _leaseMulti.lease_multi_create(event_args)\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 208, in lease_multi_create\n raise e\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 184, in lease_multi_create\n self._try_proposals(oc, mlr, results, detail)\n File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 219, in _try_proposals\n lease = self._leases.create(creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 230, in create\n features = self._get_features(creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 148, in _get_features\n self._explain_not_available(cur, creator)\n File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 299, in _explain_not_available\n raise NotFoundError(f'no pool features found for: {lcc.product_name}')\n"],
"operation_id": "e72a8ca7-34cc-4e11-b80c-273592085a24"
},
"id": "282801b9-d612-40a5-9145-b56d8e420dac",
"metadata": {},
"ts": "2022-12-15T10:25:36.831673Z"
}
```
**Stack Trace**
```
"NotFoundError", "NotFoundError(no pool features found for: NVIDIA RTX Virtual Workstation)", " File \"/venv/lib/python3.9/site-packages/nls_services_lease/controllers/lease_multi_controller.py\", line 127, in lease_multi_create
data = _leaseMulti.lease_multi_create(event_args)
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 208, in lease_multi_create
raise e
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 184, in lease_multi_create
self._try_proposals(oc, mlr, results, detail)
File \"/venv/lib/python3.9/site-packages/nls_core_lease/lease_multi.py\", line 219, in _try_proposals
lease = self._leases.create(creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 230, in create
features = self._get_features(creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 148, in _get_features
self._explain_not_available(cur, creator)
File \"/venv/lib/python3.9/site-packages/nls_dal_lease_dls/leases.py\", line 299, in _explain_not_available
raise NotFoundError(f'no pool features found for: {lcc.product_name}')
"
```
# Nginx
- NGINX uses `/opt/certs/cert.pem` and `/opt/certs/key.pem`


@@ -1,9 +1,10 @@
version: '3.9'
x-dls-variables: &dls-variables
DLS_URL: localhost # REQUIRED, change to your ip or hostname
DLS_PORT: 443 # must match nginx listen & exposed port
LEASE_EXPIRE_DAYS: 90
TZ: Europe/Berlin # REQUIRED, set your timezone correctly on fastapi-dls AND YOUR CLIENTS !!!
DLS_URL: localhost # REQUIRED, change to your ip or hostname
DLS_PORT: 443
LEASE_EXPIRE_DAYS: 90 # 90 days is maximum
DATABASE: sqlite:////app/database/db.sqlite
DEBUG: false
@@ -13,106 +14,16 @@ services:
restart: always
environment:
<<: *dls-variables
volumes:
- /opt/docker/fastapi-dls/cert:/app/cert # instance.private.pem, instance.public.pem
- db:/app/database
entrypoint: ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--app-dir", "/app", "--proxy-headers"]
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:8000/-/health"]
interval: 10s
timeout: 5s
retries: 3
start_period: 30s
proxy:
image: nginx
ports:
# these are the ports nginx (!) listens on
- "80:80" # for "/leasing/v1/lessor/shutdown" used by windows guests, can't be changed!
- "443:443" # first part must match "DLS_PORT"
- "443:443"
volumes:
- /opt/docker/fastapi-dls/cert:/opt/cert
healthcheck:
test: ["CMD", "curl", "--insecure", "--fail", "https://localhost/-/health"]
interval: 10s
timeout: 5s
retries: 3
start_period: 30s
command: |
bash -c "bash -s <<\"EOF\"
cat > /etc/nginx/nginx.conf <<\"EON\"
daemon off;
user root;
worker_processes auto;
events {
worker_connections 1024;
}
http {
gzip on;
gzip_disable "msie6";
include /etc/nginx/mime.types;
upstream dls-backend {
server dls:8000; # must match dls listen port
}
server {
listen 443 ssl http2 default_server;
listen [::]:443 ssl http2 default_server;
root /var/www/html;
index index.html;
server_name _;
ssl_certificate "/opt/cert/webserver.crt";
ssl_certificate_key "/opt/cert/webserver.key";
ssl_session_cache shared:SSL:1m;
ssl_session_timeout 10m;
ssl_protocols TLSv1.3 TLSv1.2;
# ssl_ciphers "ECDHE-ECDSA-CHACHA20-POLY1305";
# ssl_ciphers PROFILE=SYSTEM;
ssl_prefer_server_ciphers on;
location / {
proxy_set_header Host $$http_host;
proxy_set_header X-Real-IP $$remote_addr;
proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $$scheme;
proxy_pass http://dls-backend$$request_uri;
}
location = /-/health {
access_log off;
add_header 'Content-Type' 'application/json';
return 200 '{\"status\":\"up\",\"service\":\"nginx\"}';
}
}
server {
listen 80;
listen [::]:80;
root /var/www/html;
index index.html;
server_name _;
location /leasing/v1/lessor/shutdown {
proxy_set_header Host $$http_host;
proxy_set_header X-Real-IP $$remote_addr;
proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $$scheme;
proxy_pass http://dls-backend/leasing/v1/lessor/shutdown;
}
location / {
return 301 https://$$host$$request_uri;
}
}
}
EON
nginx
EOF"
- /opt/docker/fastapi-dls/cert:/app/cert
- dls-db:/app/database
logging: # optional, for those who do not need logs
driver: "json-file"
options:
max-file: 5
max-size: 10m
volumes:
db:
dls-db:


@@ -0,0 +1,120 @@
version: '3.9'
x-dls-variables: &dls-variables
DLS_URL: localhost # REQUIRED, change to your ip or hostname
DLS_PORT: 443 # must match nginx listen & exposed port
LEASE_EXPIRE_DAYS: 90
DATABASE: sqlite:////app/database/db.sqlite
DEBUG: false
services:
dls:
image: collinwebdesigns/fastapi-dls:latest
restart: always
environment:
<<: *dls-variables
volumes:
- /etc/timezone:/etc/timezone:ro
- /opt/docker/fastapi-dls/cert:/app/cert # instance.private.pem, instance.public.pem
- db:/app/database
entrypoint: ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--app-dir", "/app", "--proxy-headers"]
healthcheck:
test: ["CMD", "curl", "--fail", "http://localhost:8000/-/health"]
interval: 10s
timeout: 5s
retries: 3
start_period: 30s
proxy:
image: nginx
ports:
# these are the ports nginx (!) listens on
- "80:80" # for "/leasing/v1/lessor/shutdown" used by windows guests, can't be changed!
- "443:443" # first part must match "DLS_PORT"
volumes:
- /etc/timezone:/etc/timezone:ro
- /opt/docker/fastapi-dls/cert:/opt/cert
healthcheck:
test: ["CMD", "curl", "--insecure", "--fail", "https://localhost/-/health"]
interval: 10s
timeout: 5s
retries: 3
start_period: 30s
command: |
bash -c "bash -s <<\"EOF\"
cat > /etc/nginx/nginx.conf <<\"EON\"
daemon off;
user root;
worker_processes auto;
events {
worker_connections 1024;
}
http {
gzip on;
gzip_disable "msie6";
include /etc/nginx/mime.types;
upstream dls-backend {
server dls:8000; # must match dls listen port
}
server {
listen 443 ssl http2 default_server;
listen [::]:443 ssl http2 default_server;
root /var/www/html;
index index.html;
server_name _;
ssl_certificate "/opt/cert/webserver.crt";
ssl_certificate_key "/opt/cert/webserver.key";
ssl_session_cache shared:SSL:1m;
ssl_session_timeout 10m;
ssl_protocols TLSv1.3 TLSv1.2;
# ssl_ciphers "ECDHE-ECDSA-CHACHA20-POLY1305";
# ssl_ciphers PROFILE=SYSTEM;
ssl_prefer_server_ciphers on;
location / {
proxy_set_header Host $$http_host;
proxy_set_header X-Real-IP $$remote_addr;
proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $$scheme;
proxy_pass http://dls-backend$$request_uri;
}
location = /-/health {
access_log off;
add_header 'Content-Type' 'application/json';
return 200 '{\"status\":\"up\",\"service\":\"nginx\"}';
}
}
server {
listen 80;
listen [::]:80;
root /var/www/html;
index index.html;
server_name _;
location /leasing/v1/lessor/shutdown {
proxy_set_header Host $$http_host;
proxy_set_header X-Real-IP $$remote_addr;
proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $$scheme;
proxy_pass http://dls-backend/leasing/v1/lessor/shutdown;
}
location / {
return 301 https://$$host$$request_uri;
}
}
}
EON
nginx
EOF"
volumes:
db:


@@ -1,8 +1,8 @@
fastapi==0.89.1
uvicorn[standard]==0.20.0
python-jose==3.3.0
pycryptodome==3.16.0
python-dateutil==2.8.2
sqlalchemy==1.4.46
markdown==3.4.1
python-dotenv==0.21.0
fastapi==0.115.12
uvicorn[standard]==0.34.1
python-jose[cryptography]==3.4.0
cryptography==44.0.2
python-dateutil==2.9.0
sqlalchemy==2.0.40
markdown==3.8
python-dotenv==1.1.0


@@ -0,0 +1,123 @@
import logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
URL = 'https://docs.nvidia.com/vgpu/index.html'
BRANCH_STATUS_KEY = 'vGPU Branch Status'
VGPU_KEY, GRID_KEY, DRIVER_BRANCH_KEY = 'vGPU Software', 'vGPU Software', 'Driver Branch'
LINUX_VGPU_MANAGER_KEY, LINUX_DRIVER_KEY = 'Linux vGPU Manager', 'Linux Driver'
WINDOWS_VGPU_MANAGER_KEY, WINDOWS_DRIVER_KEY = 'Windows vGPU Manager', 'Windows Driver'
ALT_VGPU_MANAGER_KEY = 'vGPU Manager'
RELEASE_DATE_KEY, LATEST_KEY, EOL_KEY = 'Release Date', 'Latest Release in Branch', 'EOL Date'
JSON_RELEASES_KEY = '$releases'
def __driver_versions(html: 'BeautifulSoup'):
def __strip(_: str) -> str:
# removes content after linebreak (e.g. "Hello\n World" to "Hello")
_ = _.strip()
tmp = _.split('\n')
if len(tmp) > 0:
return tmp[0]
return _
# find wrapper for "DriverVersions" and find tables
data = html.find('div', {'id': 'driver-versions'})
items = data.find_all('bsp-accordion', {'class': 'Accordion-items-item'})
for item in items:
software_branch = item.find('div', {'class': 'Accordion-items-item-title'}).text.strip()
software_branch = software_branch.replace(' Releases', '')
matrix_key = software_branch.lower()
branch_status = item.find('a', href=True, string='Branch status')
branch_status = branch_status.next_sibling.replace(':', '').strip()
# driver version info from table-heads (ths) and table-rows (trs)
table = item.find('table')
ths, trs = table.find_all('th'), table.find_all('tr')
headers, releases = [header.text.strip() for header in ths], []
for tr in trs:
tds = tr.find_all('td')
if len(tds) == 0: # skip empty
continue
# create dict with table-heads as key and cell content as value
x = {headers[i]: __strip(cell.text) for i, cell in enumerate(tds)}
x.setdefault(BRANCH_STATUS_KEY, branch_status)
releases.append(x)
# add to matrix
MATRIX.update({matrix_key: {JSON_RELEASES_KEY: releases}})
def __debug():
# print table head
s = f'{VGPU_KEY:^13} | {LINUX_VGPU_MANAGER_KEY:^21} | {LINUX_DRIVER_KEY:^21} | {WINDOWS_VGPU_MANAGER_KEY:^21} | {WINDOWS_DRIVER_KEY:^21} | {RELEASE_DATE_KEY:>21} | {BRANCH_STATUS_KEY:^21}'
print(s)
# iterate over dict & format some variables to not overload table
for idx, (key, branch) in enumerate(MATRIX.items()):
for release in branch.get(JSON_RELEASES_KEY):
version = release.get(VGPU_KEY, release.get(GRID_KEY, ''))
linux_manager = release.get(LINUX_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
linux_driver = release.get(LINUX_DRIVER_KEY)
windows_manager = release.get(WINDOWS_VGPU_MANAGER_KEY, release.get(ALT_VGPU_MANAGER_KEY, ''))
windows_driver = release.get(WINDOWS_DRIVER_KEY)
release_date = release.get(RELEASE_DATE_KEY)
is_latest = release.get(VGPU_KEY) == branch.get(LATEST_KEY)
branch_status = __parse_branch_status(release.get(BRANCH_STATUS_KEY, ''))
version = f'{version} *' if is_latest else version
s = f'{version:<13} | {linux_manager:<21} | {linux_driver:<21} | {windows_manager:<21} | {windows_driver:<21} | {release_date:>21} | {branch_status:^21}'
print(s)
def __parse_branch_status(string: str) -> str:
string = string.replace('Production Branch', 'Prod. -')
string = string.replace('Long-Term Support Branch', 'LTS -')
string = string.replace('supported until', '')
string = string.replace('EOL since', 'EOL - ')
string = string.replace('EOL from', 'EOL -')
return string
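# examples (sketch; whitespace is not normalized by the replacements above):
#   'Production Branch supported until March 2026' -> 'Prod. -  March 2026'
#   'Long-Term Support Branch supported until June 2027' -> 'LTS -  June 2027'
#   'EOL since January 2024' -> 'EOL -  January 2024'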
def __dump(filename: str):
import json
with open(filename, 'w') as file:
json.dump(MATRIX, file)
if __name__ == '__main__':
MATRIX = {}
try:
import httpx
from bs4 import BeautifulSoup
except Exception as e:
logger.error(f'Failed to import module: {e}')
logger.info('Run "pip install beautifulsoup4 httpx"')
exit(1)
r = httpx.get(URL)
if r.status_code != 200:
logger.error(f'Error loading "{URL}" with status code {r.status_code}.')
exit(2)
# parse html
soup = BeautifulSoup(r.text, features='html.parser')
# build matrix
__driver_versions(soup)
# debug output
__debug()
# dump data to file
__dump('../app/static/driver_matrix.json')


@@ -1,35 +1,39 @@
import sys
from base64 import b64encode as b64enc
from hashlib import sha256
from calendar import timegm
from datetime import datetime
from os.path import dirname, join
from datetime import datetime, UTC
from hashlib import sha256
from os import getenv as env
from uuid import uuid4, UUID
from dateutil.relativedelta import relativedelta
from jose import jwt, jwk
from jose import jwt
from jose.constants import ALGORITHMS
from sqlalchemy import create_engine
from starlette.testclient import TestClient
import sys
# add relative path to use packages as they were in the app/ dir
sys.path.append('../')
sys.path.append('../app')
from app import main
from app.util import load_key
from orm import init as db_init, migrate, Site, Instance
client = TestClient(main.app)
ORIGIN_REF, ALLOTMENT_REF, SECRET = str(uuid4()), '20000000-0000-0000-0000-000000000001', 'HelloWorld'
# INSTANCE_KEY_RSA = generate_key()
# INSTANCE_KEY_PUB = INSTANCE_KEY_RSA.public_key()
# fastapi setup
client = TestClient(main.app)
INSTANCE_KEY_RSA = load_key(str(join(dirname(__file__), '../app/cert/instance.private.pem')))
INSTANCE_KEY_PUB = load_key(str(join(dirname(__file__), '../app/cert/instance.public.pem')))
# database setup
db = create_engine(str(env('DATABASE', 'sqlite:///db.sqlite')))
db_init(db), migrate(db)
jwt_encode_key = jwk.construct(INSTANCE_KEY_RSA.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
jwt_decode_key = jwk.construct(INSTANCE_KEY_PUB.export_key().decode('utf-8'), algorithm=ALGORITHMS.RS256)
# test vars
DEFAULT_SITE, DEFAULT_INSTANCE = Site.get_default_site(db), Instance.get_default_instance(db)
SITE_KEY = DEFAULT_SITE.site_key
jwt_encode_key, jwt_decode_key = DEFAULT_INSTANCE.get_jwt_encode_key(), DEFAULT_INSTANCE.get_jwt_decode_key()
def __bearer_token(origin_ref: str) -> str:
@@ -38,6 +42,12 @@ def __bearer_token(origin_ref: str) -> str:
return token
def test_initial_default_site_and_instance():
default_site, default_instance = Site.get_default_site(db), Instance.get_default_instance(db)
assert default_site.site_key == Site.INITIAL_SITE_KEY_XID
assert default_instance.instance_ref == Instance.DEFAULT_INSTANCE_REF
def test_index():
response = client.get('/')
assert response.status_code == 200
@@ -106,6 +116,7 @@ def test_auth_v1_origin():
assert response.json().get('origin_ref') == ORIGIN_REF
def auth_v1_origin_update():
payload = {
"registration_pending": False,
@@ -141,7 +152,7 @@ def test_auth_v1_code():
def test_auth_v1_token():
cur_time = datetime.utcnow()
cur_time = datetime.now(UTC)
access_expires_on = cur_time + relativedelta(hours=1)
payload = {
@@ -153,8 +164,7 @@ def test_auth_v1_token():
"kid": "00000000-0000-0000-0000-000000000000"
}
payload = {
"auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')},
algorithm=ALGORITHMS.RS256),
"auth_code": jwt.encode(payload, key=jwt_encode_key, headers={'kid': payload.get('kid')}, algorithm=ALGORITHMS.RS256),
"code_verifier": SECRET,
}
@@ -187,8 +197,6 @@ def test_leasing_v1_lessor():
assert len(lease_result_list[0]['lease']['ref']) == 36
assert str(UUID(lease_result_list[0]['lease']['ref'])) == lease_result_list[0]['lease']['ref']
return lease_result_list[0]['lease']['ref']
def test_leasing_v1_lessor_lease():
response = client.get('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
@@ -231,7 +239,23 @@ def test_leasing_v1_lease_delete():
def test_leasing_v1_lessor_lease_remove():
lease_ref = test_leasing_v1_lessor()
# see "test_leasing_v1_lessor()"
payload = {
'fulfillment_context': {
'fulfillment_class_ref_list': []
},
'lease_proposal_list': [{
'license_type_qualifiers': {'count': 1},
'product': {'name': 'NVIDIA RTX Virtual Workstation'}
}],
'proposal_evaluation_mode': 'ALL_OF',
'scope_ref_list': [ALLOTMENT_REF]
}
response = client.post('/leasing/v1/lessor', json=payload, headers={'authorization': __bearer_token(ORIGIN_REF)})
lease_result_list = response.json().get('lease_result_list')
lease_ref = lease_result_list[0]['lease']['ref']
#
response = client.delete('/leasing/v1/lessor/leases', headers={'authorization': __bearer_token(ORIGIN_REF)})
assert response.status_code == 200


@@ -1 +0,0 @@
VERSION=1.3.3