path: root/tools/get_p2_url.py
#!/usr/bin/env python3
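"""Resolve the download URL of a p2 artifact on an Eclipse update site.

The script fetches compositeContent.xml from the update site (by default the
STM32CubeIDE site at sw-center.st.com) to determine the latest release, looks
the requested artifact up in that release's artifacts.xml.xz, and prints the
URL of the corresponding plugin jar. With -v/--write-version it also reports
the IDE and artifact versions.
"""
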
import requests
from bs4 import BeautifulSoup
import sys
import lzma
import time

def fucked_up_get(*args, **kwargs):
    # requests.get() with retries: step the timeout up from 0.1 s to 5 s,
    # sleep briefly after a full pass, and give up after five passes.
    for retry in range(5):
        for timeout in [0.1, 0.5, 1.0, 2.0, 5.0]:
            kwargs.update({'timeout': timeout})
            try:
                return requests.get(*args, **kwargs)
            except requests.exceptions.Timeout:
                pass
        time.sleep(0.5)
    raise requests.exceptions.Timeout()

def crop(s, length=80):
    # Fit s into exactly `length` characters: pad short strings with spaces,
    # otherwise truncate, keeping the last 20 characters after an '...' when
    # length allows.
    if len(s) <= length:
        return s + (' ' * (length-len(s)))

    if length > 40:
        return s[:length - 20 - 3] + '...' + s[-20:]

    return s[:length]

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Print the download URL of a p2 artifact on an Eclipse update site.')
    parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1',
            help='base URL of the p2 update site')
    parser.add_argument('artifact_id', help='id of the p2 artifact to look up')
    parser.add_argument('-v', '--version', action='store_true',
            help='print IDE and artifact version information instead of the download URL')
    parser.add_argument('--write-version', type=argparse.FileType('w'),
            help='write version information to this file and still print the download URL')
    args = parser.parse_args()

    if args.write_version:
        # --write-version implies --version; version info goes to the given
        # file so the URL can still be printed to stdout.
        args.version = True
        version_file = args.write_version
    else:
        version_file = sys.stdout

    if args.version:
        print(f'[Artifact {args.artifact_id}]', file=version_file)

    # compositeContent.xml is the update site's composite index; the version
    # attribute of its <repository> element names the latest release, which is
    # also the subdirectory that release lives in.
    res = fucked_up_get(f'{args.update_site}/compositeContent.xml')
    res.raise_for_status()
    soup = BeautifulSoup(res.content, features='html.parser')
    latest_version = soup.find('repository')['version']
    if args.version:
        print('IDE version:', latest_version, file=version_file)

    # artifacts.xml.xz is the xz-compressed artifact index of that release;
    # find the entry whose id matches the requested artifact.
    res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz')
    res.raise_for_status()
    soup = BeautifulSoup(lzma.LZMADecompressor().decompress(res.content), features='html.parser')

    artifact = soup.find('artifact', recursive=True, id=args.artifact_id)
    art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
    if args.version:
        print('Artifact version:', art_ver, file=version_file)

    # Plugin jars live under <update-site>/<release>/plugins/<id>_<version>.jar.
    filename = f'{art_id}_{art_ver}.jar'
    url = f'{args.update_site}/{latest_version}/plugins/{filename}'

    # Print the URL unless only version output on stdout was requested.
    if not args.version or args.write_version:
        print(url)
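
# Usage sketch, based on the argument parser above (the artifact id is left as
# a placeholder; any artifact id published on the update site works):
#   ./get_p2_url.py <artifact-id>                          # print the download URL
#   ./get_p2_url.py -v <artifact-id>                       # print version info instead of the URL
#   ./get_p2_url.py --write-version ver.txt <artifact-id>  # write version info, still print the URL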