#!/usr/bin/env python3
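"""Mirror the plugin jars of an Eclipse p2 update site.

Fetches compositeContent.xml to find the current repository version,
decompresses that version's artifacts.xml.xz index, and downloads every
artifact whose classifier contains 'bundle' from plugins/ into a local
directory. Defaults to ST's STM32CubeIDE update site.
"""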
import lzma
import os
import shutil
import time
from os import path

import requests
from bs4 import BeautifulSoup
from tqdm import tqdm
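# Non-stdlib dependencies: requests, beautifulsoup4, tqdm
# (install with e.g.: pip install requests beautifulsoup4 tqdm)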

def fucked_up_get(*args, **kwargs):
    """requests.get against a flaky server: retry with escalating timeouts.

    Each round tries timeouts from 0.1s up to 5s; after five rounds, give up
    and re-raise Timeout.
    """
    for _ in range(5):
        for timeout in [0.1, 0.5, 1.0, 2.0, 5.0]:
            kwargs['timeout'] = timeout
            try:
                return requests.get(*args, **kwargs)
            except requests.exceptions.Timeout:
                pass
        time.sleep(0.5)
    raise requests.exceptions.Timeout()

def crop(s, length=80):
    """Pad s to exactly `length` chars, or shorten it by dropping the middle
    so that the start and the last 20 characters stay visible."""
    if len(s) <= length:
        return s + ' ' * (length - len(s))

    if length > 40:
        return s[:length - 20 - 3] + '...' + s[-20:]

    return s[:length]


def fucked_up_get_with_progress(prefix, *args, **kwargs):
    """Stream a GET response chunk by chunk while drawing a tqdm progress bar."""
    kwargs.update({'stream': True})
    res = fucked_up_get(*args, **kwargs)
    res.raise_for_status()

    # On narrow terminals, print the label on its own line rather than
    # squeezing it into the bar's description.
    if shutil.get_terminal_size((80, 32)).columns < 120:
        tqdm.write(prefix)
        prefix = None
    else:
        prefix = crop(prefix)

    total_size_in_bytes = int(res.headers.get('content-length', 0))
    with tqdm(desc=prefix, total=total_size_in_bytes, unit='iB', unit_scale=True) as tq:
        for data in res.iter_content(10000):
            tq.update(len(data))
            yield data

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--update-site', default='http://sw-center.st.com/stm32cubeide/updatesite1')
    parser.add_argument('output_dir')
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)

    # The script assumes the composite repository's version attribute also
    # names the child directory that holds the actual artifacts.
    res = fucked_up_get(f'{args.update_site}/compositeContent.xml')
    res.raise_for_status()
    soup = BeautifulSoup(res.content, features='html.parser')
    latest_version = soup.find('repository')['version']

    # artifacts.xml.xz is the xz-compressed index of every artifact in the repo.
    res = fucked_up_get(f'{args.update_site}/{latest_version}/artifacts.xml.xz')
    res.raise_for_status()
    soup = BeautifulSoup(lzma.decompress(res.content), features='html.parser')

    artifacts = soup.find_all('artifact')
    for artifact in tqdm(artifacts):
        art_id, art_ver, art_cls = artifact['id'], artifact['version'], artifact['classifier']
        # Keep only bundle artifacts; these are the jars served under plugins/.
        if 'bundle' not in art_cls:
            continue

        filename = f'{art_id}_{art_ver}.jar'
        output_filename = path.join(args.output_dir, filename)
        if path.isfile(output_filename):
            tqdm.write(f'{filename} exists, skipping.')
            continue

        # Write to a .part file first so an interrupted download is not
        # mistaken for a complete jar by the skip check above.
        partial_filename = output_filename + '.part'
        with open(partial_filename, 'wb') as f:
            for block in fucked_up_get_with_progress(output_filename, f'{args.update_site}/{latest_version}/plugins/{filename}'):
                f.write(block)
        os.replace(partial_filename, output_filename)
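
# Usage sketch (flags per the argparse definition above):
#   ./p2brute.py out/                          # mirror the default STM32CubeIDE site
#   ./p2brute.py -u <update-site-url> out/     # mirror some other p2 update site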