[POSTFIX] refactoring

Ludwig Lehnert 2025-02-16 08:50:21 +01:00
parent b32ae9fea7
commit e51a12a965
3 changed files with 28 additions and 19 deletions


@@ -1,29 +1,30 @@
 # FAU-TV Video Downloader
 Downloads all videos of a given course (even if the download has been restricted) to a local folder.
-This software is provided without warranty. Usage is discouraged! Usage of this software is at your own risk!
+This software is provided without warranty. Usage is discouraged! Use this software at your own risk!
 ## Usage
 ```bash
-usage: dl.py [-h] [--out-dir OUT_DIR] [--starter=url STARTER=URL] [--start-at START_AT] [--auth-token AUTH_TOKEN]
-             [--session-id SESSION_ID] [--session-ci SESSION_CI]
-             course-id
+usage: dl.py [-h] [--out-dir OUT_DIR] [--starter-url STARTER_URL] [--start-at START_AT] [--auth-token AUTH_TOKEN]
+             [--session-id SESSION_ID] [--session-ci SESSION_CI] [--no-auth]
+             course_id
 Download clips from a course
 positional arguments:
-  course-id             Course ID
+  course_id             Course ID
 options:
   -h, --help            show this help message and exit
   --out-dir OUT_DIR     Output directory
-  --starter=url STARTER=URL
+  --starter-url STARTER_URL
                         Starter URL
   --start-at START_AT   Skip all previous indices (defaults to 1)
   --auth-token AUTH_TOKEN
   --session-id SESSION_ID
   --session-ci SESSION_CI
+  --no-auth
 ```
 ## Procedure
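
For orientation, a hypothetical invocation matching the usage text above; the course ID and output directory are placeholders, not values from this repository.

```bash
# download all clips of course 1234 into ./lectures, skipping the first two
python dl.py --out-dir ./lectures --start-at 3 1234
```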

dl.py

@@ -8,10 +8,10 @@ def parse_args():
         description='Download clips from a course',
     )
-    parser.add_argument('course-id', type=str, help='Course ID')
+    parser.add_argument('course_id', type=str, help='Course ID')
     parser.add_argument('--out-dir', type=str,
                         help='Output directory', default='./out')
-    parser.add_argument('--starter=url', type=str,
+    parser.add_argument('--starter-url', type=str,
                         help='Starter URL', default="https://www.fau.tv/auth/sso")
     parser.add_argument('--start-at', type=int,
                         help='Skip all previous indices (defaults to 1)', default=1)
@@ -20,24 +20,26 @@ def parse_args():
     parser.add_argument('--session-id', type=str)
     parser.add_argument('--session-ci', type=str)
+    parser.add_argument('--no-auth', action='store_true')
     return parser.parse_args()
 def main():
     args = parse_args()
-    if args.authToken and args.sessionID and args.sessionCI:
-        set_token(args.authToken, args.sessionID, args.sessionCI)
-    else:
-        load_token(args.starterUrl)
+    if not args.no_auth and args.auth_token and args.session_id and args.session_ci:
+        set_token(args.auth_token, args.session_id, args.session_ci)
+    elif not args.no_auth:
+        load_token(args.starter_url)
-    os.makedirs(args.outDir, exist_ok=True)
+    os.makedirs(args.out_dir, exist_ok=True)
-    for index, clip_id in enumerate(get_course_clip_ids(args.courseId)):
-        if index < args.startAt - 1:
+    for index, clip_id in enumerate(get_course_clip_ids(args.course_id)):
+        if index < args.start_at - 1:
             continue
-        download_clip(clip_id, f'{args.outDir}/{index+1: 04d}_{clip_id}.mp4')
+        download_clip(clip_id, f'{args.out_dir}/{index+1: 04d}_{clip_id}.mp4')
 if __name__ == '__main__':
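
The reworked `main()` above now distinguishes three authentication paths. Hypothetical invocations for each; token, session, and course values are placeholders.

```bash
# explicit session: cookies passed on the command line, handled via set_token()
python dl.py --auth-token TOKEN --session-id SID --session-ci SCI 1234

# default: no cookies given, load_token() is called with the --starter-url (SSO) address
python dl.py 1234

# --no-auth: neither set_token() nor load_token() runs
python dl.py --no-auth 1234
```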

lib.py

@@ -9,6 +9,12 @@ import shutil
 _token: "Token" = None
+def _cookies() -> dict[str, str]:
+    if _token is None:
+        return {}
+    return _token.cookies()
 @dataclass
 class Token():
     auth_token: str
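
For context, a minimal sketch of the surrounding token plumbing; the `set_token` body, the extra `Token` fields, and the cookie names are assumptions for illustration, not the repository's actual code. It shows why the new `_cookies()` helper matters: with `--no-auth` neither `set_token` nor `load_token` runs, `_token` stays `None`, and requests go out without cookies instead of crashing on `_token.cookies()`.

```python
from dataclasses import dataclass

@dataclass
class Token():
    auth_token: str
    session_id: str  # field name assumed; only auth_token is visible in the hunk
    session_ci: str  # field name assumed

    def cookies(self) -> dict[str, str]:
        # cookie names are placeholders, not taken from the repository
        return {'auth': self.auth_token, 'sid': self.session_id, 'ci': self.session_ci}

_token: "Token" = None

def set_token(auth_token: str, session_id: str, session_ci: str) -> None:
    # sketch: remember the credentials in the module-level global used by _cookies()
    global _token
    _token = Token(auth_token, session_id, session_ci)

def _cookies() -> dict[str, str]:
    # the null-safe accessor introduced by this commit
    if _token is None:
        return {}
    return _token.cookies()
```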
@@ -95,7 +101,7 @@ def get_course_clip_ids(course_id: str) -> list[str]:
     urls = []
     url = f'https://www.fau.tv/course/id/{course_id}'
-    with requests.get(url, cookies=_token.cookies()) as r:
+    with requests.get(url, cookies=_cookies()) as r:
         clip_matches = regex.findall(r.text)
         for match in clip_matches:
             urls.append(match[1])
@@ -109,7 +115,7 @@ def get_clip_details(clip_id: str) -> ClipDetails:
     url = f'https://www.fau.tv/clip/id/{clip_id}'
     details = ClipDetails(clip_id=clip_id)
-    with requests.get(url, cookies=_token.cookies()) as r:
+    with requests.get(url, cookies=_cookies()) as r:
         def get_details(keyword: str):
             mediaid_re = re.compile(
                 r'(' + keyword + r'Sources[^,]*,\s+mediaid\:\s+\")([0-9]+)'
@@ -141,7 +147,7 @@ def download_media(media_id: str, outfile_path: str):
     url = f'https://itunes.video.uni-erlangen.de/get/file/' + \
         str(media_id) + '?download=1'
-    with requests.get(url, stream=True, cookies=_token.cookies()) as r:
+    with requests.get(url, stream=True, cookies=_cookies()) as r:
         if (r.status_code != 200):
             return False
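
The hunk above ends at the status check. A minimal sketch of how such a streaming download is typically completed; this continuation is an assumption based on the `import shutil` visible in the hunk header, and the function name is hypothetical, not the repository's actual code.

```python
import shutil
import requests

def download_media_sketch(media_id: str, outfile_path: str, cookies: dict[str, str] = None) -> bool:
    # hypothetical stand-in for download_media(); the URL layout is copied from the diff,
    # and the cookies argument would come from lib.py's _cookies() helper
    url = 'https://itunes.video.uni-erlangen.de/get/file/' + str(media_id) + '?download=1'
    with requests.get(url, stream=True, cookies=cookies or {}) as r:
        if r.status_code != 200:
            return False
        with open(outfile_path, 'wb') as f:
            # stream the body straight to disk instead of buffering the whole video in memory
            shutil.copyfileobj(r.raw, f)
    return True
```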