blob: 5e9111b393cb39a767c325aba4a1788ceee22c1b [file] [log] [blame]
Dean Birchd0f9f8c2020-03-26 11:10:33 +00001#!/usr/bin/env python3
2#
3# Downloads artifacts from a build of tf-m-build-and-test
4#
5
6__copyright__ = """
7/*
8 * Copyright (c) 2020, Arm Limited. All rights reserved.
9 *
10 * SPDX-License-Identifier: BSD-3-Clause
11 *
12 */
13 """
14
15import requests
16import argparse
17import os
18from urllib.parse import urljoin
19from html.parser import HTMLParser
20
21
22class UrlExtracter(HTMLParser):
23 def __init__(self):
24 super().__init__()
25 self.last_tag = None
26 self.last_link = None
27 self.last_config = None
28 self.build_artifacts = {}
29 self.build_logs = {}
30
31 def handle_starttag(self, tag, attrs):
32 for key, value in attrs:
33 if key == "href":
34 self.last_link = value
35 self.last_tag = tag
36
37 def handle_endtag(self, tag):
38 if tag == "br":
39 self.last_tag = None
40
41 def handle_data(self, data):
42 if not self.last_tag:
43 self.last_config = data.replace(": ", "").replace("\n", "")
44 return
45
46 if self.last_tag == "a":
47 if data == "Artifacts":
48 self.build_artifacts[self.last_config] = self.last_link
49 elif data == "Logs":
50 self.build_logs[self.last_config] = self.last_link
51
52
53def download_artifacts(url, save_dir):
54 if not url.endswith("/"):
55 url += "/"
56 job_page_req = requests.get(url)
57 if job_page_req.status_code != requests.codes.ok:
58 print("Issue contacting given URL")
59 return
60 print("Found build")
61 build_links_req = requests.get(urljoin(url, "artifact/build_links.html"))
62 if build_links_req.status_code != requests.codes.ok:
63 print("Given build did not have an artifact called `build_links.html`")
64 return
65 parser = UrlExtracter()
66 print("Extracting links from build_links.html")
67 parser.feed(build_links_req.text)
68 print("Links found")
69 if not os.path.exists(save_dir):
70 print("Creating directory at {}".format(save_dir))
71 os.makedirs(save_dir)
72 else:
73 print("Reusing directory at {}.")
74 for config, log_url in parser.build_logs.items():
75 print("Downloading {}".format(log_url))
76 log_req = requests.get(log_url)
77 log_file_path = os.path.join(save_dir, "{}.log".format(config))
78 with open(log_file_path, "w") as log_file:
79 log_file.write(log_req.text)
80 print("Saved log to {}".format(log_file_path))
81 for config, artifacts_url in parser.build_artifacts.items():
82 zip_url = urljoin(artifacts_url, "*zip*/archive.zip")
83 print("Downloading {}".format(zip_url))
84 artifact_zip_req = requests.get(zip_url, stream=True)
85 zip_file = os.path.join(save_dir, "{}.zip".format(config))
86 with open(zip_file, "wb") as artifact_zip:
87 for chunk in artifact_zip_req.iter_content(chunk_size=8192):
88 artifact_zip.write(chunk)
89 print("Saved artifacts zip to {}".format(zip_file))
90 print("Finished")
91
92
93def main():
94 argparser = argparse.ArgumentParser()
95 argparser.add_argument(
96 "job_url", help="Url to a completed build of tf-m-build-and-test"
97 )
98 argparser.add_argument(
99 "-o", "--output_dir", default="artifacts", help="Location to save artifacts to."
100 )
101 args = argparser.parse_args()
102 download_artifacts(args.job_url, args.output_dir)
103
104
105if __name__ == "__main__":
106 main()