author     jaseg <git@jaseg.de>    2020-11-13 20:28:12 +0100
committer  jaseg <git@jaseg.de>    2020-11-13 20:28:12 +0100
commit     80835d33fd15ae7c6def03ec2abe15cc01cc2f18
tree       86b85c7ac298db88d5e4c9777a6d65e21542f0df
Initial commit
-rw-r--r--  p2brute.py  86
1 file changed, 86 insertions, 0 deletions
```diff
diff --git a/p2brute.py b/p2brute.py
new file mode 100644
index 0000000..2f8067d
--- /dev/null
+++ b/p2brute.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+import requests
+import re
+from bs4 import BeautifulSoup
+import lzma
+from urllib.parse import urlparse
+from contextlib import suppress
+import os
+from os import path
+from tqdm import tqdm
+import shutil
+import time
+
+def fucked_up_get(*args, **kwargs):
+    for retry in range(5):
+        for timeout in [0.1, 0.5, 1.0, 2.0, 5.0]:
+            kwargs.update({'timeout': timeout})
+            try:
+                return requests.get(*args, **kwargs)
+            except requests.exceptions.Timeout:
+                pass
+        time.sleep(0.5)
+    raise requests.exceptions.Timeout()
+
+def crop(s, length=80):
+    if len(s) <= length:
+        return s + (' ' * (length-len(s)))
+
+    if length > 40:
+        return s[:length - 20 - 3] + '...' + s[-20:]
+
+    return s[:length]
+
+
+def fucked_up_get_with_progress(prefix, *args, **kwargs):
+    kwargs.update({'stream': True})
+    res = fucked_up_get(*args, **kwargs)
+    res.raise_for_status()
+
+    if shutil.get_terminal_size((80, 32)).columns < 120:
+        tqdm.write(prefix)
+        prefix = None
+    else:
+        prefix = crop(prefix)
+
+    total_size_in_bytes = int(res.headers.get('content-length', 0))
+    with tqdm(desc=prefix, total=total_size_in_bytes, unit='iB', unit_scale=True) as tq:
+        for data in res.iter_content(10000):
+            tq.update(len(data))
+            yield data
+
+if __name__ == '__main__':
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1')
+    parser.add_argument('output_dir')
+    args = parser.parse_args()
+
+    if not path.isdir(args.output_dir):
+        os.mkdir(args.output_dir)
+
+    res = fucked_up_get(f'{args.update_site}/compositeContent.xml')
+    res.raise_for_status()
+    soup = BeautifulSoup(res.content, features='html.parser')
+    latest_version = soup.find('repository')['version']
+
+    res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz')
+    res.raise_for_status()
+    soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser')
+
+    artifacts = soup.find_all('artifact', recursive=True)
+    for artifact in tqdm(artifacts):
+        art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
+        if 'bundle' not in art_cls:
+            continue
+
+        filename = f'{art_id}_{art_ver}.jar'
+        output_filename = path.join(args.output_dir, filename)
+        if path.isfile(output_filename):
+            tqdm.write(f'{filename} exists, skipping.')
+            continue
+
+        with open(output_filename, 'wb') as f:
+            for block in fucked_up_get_with_progress(output_filename, f'{args.update_site}/{latest_version}/plugins/{filename}'):
+                f.write(block)
+
```
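For context on the metadata the script parses: the update site is an Eclipse p2 repository, so `compositeContent.xml` names the current child repository version and `artifacts.xml.xz` enumerates the downloadable artifacts. Below is a minimal sketch of the filtering step from the main loop, run against a tiny hand-written fragment; the `osgi.bundle` classifier string and the id/version values in the sample are illustrative assumptions, not data taken from the real ST update site.

```python
# Illustrative sketch only: a made-up artifacts.xml fragment showing the
# structure the main loop relies on. The real file is xz-compressed and much
# larger; classifier/id/version values here are assumptions for illustration.
from bs4 import BeautifulSoup

sample_artifacts_xml = '''
<repository type='org.eclipse.equinox.p2.artifact.repository.simpleRepository' version='1'>
  <artifacts size='2'>
    <artifact classifier='osgi.bundle' id='com.example.some.plugin' version='1.2.3.456'/>
    <artifact classifier='binary' id='com.example.some.binary' version='1.0.0'/>
  </artifacts>
</repository>
'''

soup = BeautifulSoup(sample_artifacts_xml, features='html.parser')
for artifact in soup.find_all('artifact', recursive=True):
    art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
    if 'bundle' not in art_cls:
        continue  # the script only mirrors plugin bundles, not other artifact classifiers
    # same file naming scheme the script requests under plugins/
    print(f'{art_id}_{art_ver}.jar')
```

Per the argparse setup, the script itself is invoked as `p2brute.py <output_dir>`, with `-u`/`--update-site` defaulting to ST's public CubeIDE update site.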