From 43d49d978ea441e0987eda471ba5744a74ee3c79 Mon Sep 17 00:00:00 2001 From: jaseg Date: Tue, 17 Nov 2020 19:58:42 +0100 Subject: Makefile: WIP --- Makefile | 43 +++++++++++++++++++++++++++ p2brute.py | 86 ----------------------------------------------------- tools/get_p2_url.py | 58 ++++++++++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+), 86 deletions(-) create mode 100644 Makefile delete mode 100644 p2brute.py create mode 100644 tools/get_p2_url.py diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..a029670 --- /dev/null +++ b/Makefile @@ -0,0 +1,43 @@ + +BUILDDIR ?= build + +WGET ?= wget +PYTHON ?= python3 + +$(BUILDDIR): + mkdir -p $(BUILDDIR) + +.PRECIOUS: $(BUILDDIR)/%.jar +$(BUILDDIR)/%.jar: + mkdir -p $(BUILDDIR) + $(WGET) --tries=10 -N -O $@ $$($(PYTHON) tools/get_p2_url.py $*) + touch $@ + +$(BUILDDIR)/%.dir: $(BUILDDIR)/%.jar + mkdir -p $@ + rm -rf $@ + unzip -d $@ $< + +.PHONY: update_svd +update_svd: $(BUILDDIR)/com.st.stm32cube.ide.mcu.productdb.debug.dir + rm -rf svd + mkdir -p svd + cp $ 40: - return s[:length - 20 - 3] + '...' 
+ s[-20:] - - return s[:length] - - -def fucked_up_get_with_progress(prefix, *args, **kwargs): - kwargs.update({'stream': True}) - res = fucked_up_get(*args, **kwargs) - res.raise_for_status() - - if shutil.get_terminal_size((80, 32)).columns < 120: - tqdm.write(prefix) - prefix = None - else: - prefix = crop(prefix) - - total_size_in_bytes= int(res.headers.get('content-length', 0)) - with tqdm(desc=prefix, total=total_size_in_bytes, unit='iB', unit_scale=True) as tq: - for data in res.iter_content(10000): - tq.update(len(data)) - yield data - -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1') - parser.add_argument('output_dir') - args = parser.parse_args() - - if not path.isdir(args.output_dir): - os.mkdir(args.output_dir) - - res = fucked_up_get(f'{args.update_site}/compositeContent.xml') - res.raise_for_status() - soup = BeautifulSoup(res.content, features='html.parser') - latest_version = soup.find('repository')['version'] - - res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz') - res.raise_for_status() - soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser') - - artifacts = soup.find_all('artifact', recursive=True) - for artifact in tqdm(artifacts): - art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier'] - if 'bundle' not in art_cls: - continue - - filename = f'{art_id}_{art_ver}.jar' - output_filename = path.join(args.output_dir, filename) - if path.isfile(output_filename): - tqdm.write(f'{filename} exists, skipping.') - continue - - with open(output_filename, 'wb') as f: - for block in fucked_up_get_with_progress(output_filename, f'{args.update_site}/{latest_version}/plugins/{filename}'): - f.write(block) - diff --git a/tools/get_p2_url.py b/tools/get_p2_url.py new file mode 100644 index 0000000..23b2d0b --- /dev/null +++ 
b/tools/get_p2_url.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +import requests +import re +from bs4 import BeautifulSoup +import lzma +from urllib.parse import urlparse +from contextlib import suppress +import os +from os import path +from tqdm import tqdm +import shutil +import time +from pathlib import Path + +def fucked_up_get(*args, **kwargs): + for retry in range(5): + for timeout in [0.1, 0.5, 1.0, 2.0, 5.0]: + kwargs.update({'timeout': timeout}) + try: + return requests.get(*args, **kwargs) + except requests.exceptions.Timeout: + pass + time.sleep(0.5) + raise requests.exceptions.Timeout() + +def crop(s, length=80): + if len(s) <= length: + return s + (' ' * (length-len(s))) + + if length > 40: + return s[:length - 20 - 3] + '...' + s[-20:] + + return s[:length] + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser() + parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1') + parser.add_argument('artifact_id') + args = parser.parse_args() + + res = fucked_up_get(f'{args.update_site}/compositeContent.xml') + res.raise_for_status() + soup = BeautifulSoup(res.content, features='html.parser') + latest_version = soup.find('repository')['version'] + + res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz') + res.raise_for_status() + soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser') + + artifact = soup.find('artifact', recursive=True, id=args.artifact_id) + art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier'] + + filename = f'{art_id}_{art_ver}.jar' + + url = f'{args.update_site}/{latest_version}/plugins/{filename}' + print(f'{url}') + -- cgit