aboutsummaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorjaseg <git@jaseg.de>2020-11-17 19:58:42 +0100
committerjaseg <git@jaseg.de>2020-11-17 19:58:42 +0100
commit43d49d978ea441e0987eda471ba5744a74ee3c79 (patch)
treed13e1697fcb5e7f1689806daa26040f13d8b0b38 /tools
parent80835d33fd15ae7c6def03ec2abe15cc01cc2f18 (diff)
downloadstm32square-43d49d978ea441e0987eda471ba5744a74ee3c79.tar.gz
stm32square-43d49d978ea441e0987eda471ba5744a74ee3c79.tar.bz2
stm32square-43d49d978ea441e0987eda471ba5744a74ee3c79.zip
Makefile: WIP
Diffstat (limited to 'tools')
-rw-r--r--tools/get_p2_url.py58
1 file changed, 58 insertions, 0 deletions
diff --git a/tools/get_p2_url.py b/tools/get_p2_url.py
new file mode 100644
index 0000000..23b2d0b
--- /dev/null
+++ b/tools/get_p2_url.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+import requests
+import re
+from bs4 import BeautifulSoup
+import lzma
+from urllib.parse import urlparse
+from contextlib import suppress
+import os
+from os import path
+from tqdm import tqdm
+import shutil
+import time
+from pathlib import Path
+
def fucked_up_get(*args, **kwargs):
    """requests.get with stubborn retry behavior for a flaky server.

    Performs up to 5 rounds of attempts; within each round the request
    timeout is ramped through 0.1, 0.5, 1.0, 2.0 and 5.0 seconds.  The
    first successful response is returned.  Between rounds we back off
    for half a second.  If every attempt times out, a
    requests.exceptions.Timeout is raised.

    All positional and keyword arguments are forwarded to requests.get;
    any caller-supplied 'timeout' is overridden by the ramp.
    """
    timeout_ramp = (0.1, 0.5, 1.0, 2.0, 5.0)
    for _round in range(5):
        for limit in timeout_ramp:
            kwargs['timeout'] = limit
            try:
                return requests.get(*args, **kwargs)
            except requests.exceptions.Timeout:
                continue  # escalate to the next, longer timeout
        time.sleep(0.5)  # brief pause before the next round
    raise requests.exceptions.Timeout()
+
def crop(s, length=80):
    """Fit string *s* into a fixed-width column of *length* characters.

    Strings that already fit are right-padded with spaces up to *length*.
    Longer strings are abbreviated: when *length* exceeds 40, the head of
    the string and its last 20 characters are kept around an '...'
    ellipsis (the result is exactly *length* characters); for narrower
    columns the string is simply truncated to *length*.
    """
    padding = length - len(s)
    if padding >= 0:
        return s + ' ' * padding

    if length > 40:
        head = s[:length - 23]  # 23 = 3 ellipsis chars + 20-char tail
        tail = s[-20:]
        return f'{head}...{tail}'

    return s[:length]
+
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Resolve the download URL of a p2 artifact on an Eclipse update site.')
    parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1')
    parser.add_argument('artifact_id')
    args = parser.parse_args()

    # The composite repository index lists the versioned child repositories;
    # the first <repository> entry carries the version we want.
    res = fucked_up_get(f'{args.update_site}/compositeContent.xml')
    res.raise_for_status()
    soup = BeautifulSoup(res.content, features='html.parser')
    latest_version = soup.find('repository')['version']

    # The child repository's artifact index is served xz-compressed.
    res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz')
    res.raise_for_status()
    soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser')

    artifact = soup.find('artifact', recursive=True, id=args.artifact_id)
    if artifact is None:
        # Fail with a clear message instead of a TypeError on subscripting None.
        raise SystemExit(f'Artifact {args.artifact_id} not found in repository version {latest_version}')
    art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']

    # p2 convention: bundle jars are named <id>_<version>.jar under plugins/.
    # NOTE(review): artifacts with a feature classifier live under features/
    # instead of plugins/ — art_cls is available should that case matter here.
    filename = f'{art_id}_{art_ver}.jar'

    # Bug fix: the URL previously embedded the literal text '(unknown)' and the
    # computed filename was never used.
    url = f'{args.update_site}/{latest_version}/plugins/{filename}'
    print(url)
+