commit 43d49d978ea441e0987eda471ba5744a74ee3c79
tree d13e1697fcb5e7f1689806daa26040f13d8b0b38
parent 80835d33fd15ae7c6def03ec2abe15cc01cc2f18
Author: jaseg <git@jaseg.de>
Date:   Tue Nov 17 19:58:42 2020 +0100

    Makefile: WIP

 Makefile                          | 44 ++++++++++++++++++++++++++++++++++++++++++++++++
 p2brute.py => tools/get_p2_url.py | 42 +++++++-----------------------------------
 2 files changed, 51 insertions(+), 35 deletions(-)
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..a029670
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,44 @@
+
+BUILDDIR ?= build
+
+WGET ?= wget
+PYTHON ?= python3
+
+$(BUILDDIR):
+	mkdir -p $(BUILDDIR)
+
+.PRECIOUS: $(BUILDDIR)/%.jar
+$(BUILDDIR)/%.jar:
+	mkdir -p $(BUILDDIR)
+	$(WGET) --tries=10 -O $@ $$($(PYTHON) tools/get_p2_url.py $*)
+	touch $@
+
+$(BUILDDIR)/%.dir: $(BUILDDIR)/%.jar
+	rm -rf $@
+	mkdir -p $@
+	unzip -d $@ $<
+
+.PHONY: update_svd
+update_svd: $(BUILDDIR)/com.st.stm32cube.ide.mcu.productdb.debug.dir
+	rm -rf svd
+	mkdir -p svd
+	cp $</resources/cmsis/STMicroelectronics_CMSIS_SVD/*.svd $</resources/cmsis/STMicroelectronics_CMSIS_SVD/License.html svd/
+
+.PHONY: update_xml
+update_xml: $(BUILDDIR)/com.st.stm32cube.common.mx.dir
+	rm -rf mx_mcu_db
+	mkdir -p mx_mcu_db
+	cp $</about.html mx_mcu_db/License.html
+	cp -r $</db/mcu/* mx_mcu_db/
+
+.PHONY: update_prog_db
+update_prog_db: $(BUILDDIR)/com.st.stm32cube.ide.mcu.externaltools.cubeprogrammer.linux64.dir
+	rm -rf prog_db
+	mkdir -p prog_db
+	cp $</about.html prog_db/License.html
+	cp $</tools/Data_Base/*.xml prog_db/
+
+# The following file contains garbage data on the µC's memories.
+#stm32targets.xml: $(BUILDDIR)/com.st.stm32cube.ide.mcu.productdb.dir
+#	cp $</resources/board_def/$@ $@
+#$(BUILDDIR)/com.st.stm32cube.ide.mcu.productdb.dir:
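
Taken together, the pattern rules above form a small pipeline: tools/get_p2_url.py resolves an Eclipse p2 artifact id to a download URL, wget fetches the jar into $(BUILDDIR), and unzip unpacks it into a matching .dir directory from which the update_* targets copy files out. As a rough standalone sketch of the same flow in Python (illustrative only; the helper name fetch_artifact and the use of urllib/zipfile are not part of the repo, which drives this with wget and unzip):

    # Sketch only: mirrors the %.jar/%.dir pattern rules above, assuming
    # tools/get_p2_url.py prints exactly one download URL for the given id.
    import subprocess, urllib.request, zipfile
    from pathlib import Path

    def fetch_artifact(artifact_id, builddir=Path('build')):
        builddir.mkdir(parents=True, exist_ok=True)
        # Equivalent of: $$($(PYTHON) tools/get_p2_url.py $*)
        url = subprocess.check_output(
            ['python3', 'tools/get_p2_url.py', artifact_id], text=True).strip()
        jar = builddir / f'{artifact_id}.jar'
        urllib.request.urlretrieve(url, jar)      # $(WGET) --tries=10 -O $@ URL
        outdir = builddir / f'{artifact_id}.dir'
        with zipfile.ZipFile(jar) as z:           # unzip -d $@ $<
            z.extractall(outdir)
        return outdir
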
diff --git a/p2brute.py b/tools/get_p2_url.py
index 2f8067d..23b2d0b 100644
--- a/p2brute.py
+++ b/tools/get_p2_url.py
@@ -10,6 +10,7 @@ from os import path
 from tqdm import tqdm
 import shutil
 import time
+from pathlib import Path
 
 def fucked_up_get(*args, **kwargs):
     for retry in range(5):
@@ -31,34 +32,13 @@ def crop(s, length=80):
     return s[:length]
 
-
-def fucked_up_get_with_progress(prefix, *args, **kwargs):
-    kwargs.update({'stream': True})
-    res = fucked_up_get(*args, **kwargs)
-    res.raise_for_status()
-
-    if shutil.get_terminal_size((80, 32)).columns < 120:
-        tqdm.write(prefix)
-        prefix = None
-    else:
-        prefix = crop(prefix)
-
-    total_size_in_bytes= int(res.headers.get('content-length', 0))
-    with tqdm(desc=prefix, total=total_size_in_bytes, unit='iB', unit_scale=True) as tq:
-        for data in res.iter_content(10000):
-            tq.update(len(data))
-            yield data
-
 
 if __name__ == '__main__':
     import argparse
     parser = argparse.ArgumentParser()
     parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1')
-    parser.add_argument('output_dir')
+    parser.add_argument('artifact_id')
     args = parser.parse_args()
 
-    if not path.isdir(args.output_dir):
-        os.mkdir(args.output_dir)
-
     res = fucked_up_get(f'{args.update_site}/compositeContent.xml')
     res.raise_for_status()
     soup = BeautifulSoup(res.content, features='html.parser')
@@ -68,19 +48,11 @@ if __name__ == '__main__':
     res.raise_for_status()
     soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser')
 
-    artifacts = soup.find_all('artifact', recursive=True)
-    for artifact in tqdm(artifacts):
-        art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
-        if 'bundle' not in art_cls:
-            continue
+    artifact = soup.find('artifact', recursive=True, id=args.artifact_id)
+    art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
 
-        filename = f'{art_id}_{art_ver}.jar'
-        output_filename = path.join(args.output_dir, filename)
-        if path.isfile(output_filename):
-            tqdm.write(f'{filename} exists, skipping.')
-            continue
+    filename = f'{art_id}_{art_ver}.jar'
 
-        with open(output_filename, 'wb') as f:
-            for block in fucked_up_get_with_progress(output_filename, f'{args.update_site}/{latest_version}/plugins/{filename}'):
-                f.write(block)
+    url = f'{args.update_site}/{latest_version}/plugins/{filename}'
+    print(url)
 
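
With this change the script no longer downloads anything itself: it resolves a single artifact id to a URL on stdout, e.g. python3 tools/get_p2_url.py com.st.stm32cube.common.mx, which the Makefile then hands to wget. The p2 lookup it performs goes roughly as below; note that the derivation of latest_version happens between the hunks and is not shown, so picking the highest <child location=...> entry of the composite repository is an assumption made for illustration:

    # Sketch of the lookup, with plain requests in place of the script's
    # fucked_up_get() retry wrapper; the latest_version selection is assumed.
    import lzma
    import requests
    from bs4 import BeautifulSoup

    def get_p2_url(artifact_id,
                   update_site='http://sw-center.st.com/stm32cubeide/updatesite1'):
        # The composite repository lists one child repository per release.
        res = requests.get(f'{update_site}/compositeContent.xml')
        res.raise_for_status()
        soup = BeautifulSoup(res.content, features='html.parser')
        latest_version = max(child['location'] for child in soup.find_all('child'))
        # Each child repository carries an xz-compressed artifact index.
        res = requests.get(f'{update_site}/{latest_version}/artifacts.xml.xz')
        res.raise_for_status()
        soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content),
                             features='html.parser')
        # Plugin jars live under plugins/<id>_<version>.jar.
        artifact = soup.find('artifact', recursive=True, id=artifact_id)
        return (f'{update_site}/{latest_version}/plugins/'
                f'{artifact["id"]}_{artifact["version"]}.jar')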