# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors

# <pep8 compliant>

import re
import time
import urllib.request

import worker.blender.version
import worker.deploy.artifacts
import worker.deploy
import worker.utils


def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
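    """Poll the release mirrors until they all list the expected files for this version.

    Progress is printed for each mirror. The function returns once every
    monitored mirror has the complete file set, when the maximum wait time is
    reached, or (if stop_on_required_site_found is enabled) as soon as the
    required mirror is complete.
    """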
    wait_time_in_seconds = 120

    start_time = time.time()
    max_time_hours = 4.0

    version_info = worker.blender.version.VersionInfo(builder)

    required_base_url = "https://mirror.clarkson.edu/blender/release"
    monitored_base_urls = [
        "https://download.blender.org/release",
        "https://ftp.nluug.nl/pub/graphics/blender/release",
        "https://ftp.halifax.rwth-aachen.de/blender/release",
        "https://mirrors.dotsrc.org/blender/blender-release",
        "https://mirrors.ocf.berkeley.edu/blender/release",
        "https://mirrors.iu13.net/blender/release",
        "https://mirrors.aliyun.com/blender/release",
        "https://mirrors.sahilister.in/blender/release",
        "https://mirror.freedif.org/blender/release",
        required_base_url,
    ]
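
    # When enabled, monitoring stops as soon as the required mirror has the
    # complete file set and no other mirror checked so far is partially synced.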
    stop_on_required_site_found = False

    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
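
    # Expected per-mirror file count: the checksum files plus one package per
    # platform (Windows ships three: msi, msix and zip).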
    expected_file_count = len(worker.deploy.artifacts.checksums)
    for expected_platform in expected_platforms:
        if expected_platform.startswith("windows"):
            expected_file_count += 3  # msi, msix, zip
        else:
            expected_file_count += 1
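
    # Release folder name and a pattern matching this version's file names.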
    folder_name = f"Blender{version_info.short_version}"
    file_pattern = rf"[Bb]lender-{version_info.version}[\.\-\_a-zA-Z0-9]*"
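
    # Poll every mirror until all of them list the complete file set, or the
    # maximum wait time is reached.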
    while True:
        found_site_count = 0
        print("=" * 80)

        # Assume no files are missing
        sites_missing_files_count = 0

        for base_url in monitored_base_urls:
            search_url = f"{base_url}/{folder_name}"
            print(f"Checking [{search_url}] for version [{version_info.version}]")

            # Header to avoid getting permission denied.
            request = urllib.request.Request(
                search_url, headers={"User-Agent": "Mozilla"}
            )

            try:
                response = urllib.request.urlopen(request, timeout=5.0)
                text = response.read().decode("utf-8", "ignore")
            except Exception as e:
                print(e)
                text = ""
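
            # Distinct file names for this version found in the directory listing.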
            matches = set(re.findall(file_pattern, text))
            for match in matches:
                print(f"File [{match}]")

            if len(matches) == expected_file_count:
                found_site_count += 1
            elif len(matches) > 0:
                sites_missing_files_count += 1
            print("-" * 80)
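
            # The required mirror is only considered sufficient when every
            # mirror checked so far in this pass is either complete or empty.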
            can_stop_monitoring = (
                (len(matches) == expected_file_count)
                and (base_url == required_base_url)
                and (sites_missing_files_count == 0)
            )

            if stop_on_required_site_found and can_stop_monitoring:
                print(f"Required site found [{required_base_url}], stopping")
                return

        print("")
        print("=" * 80)
        print(
            f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files"
        )
        print("=" * 80)
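
        # Done once every monitored mirror lists the complete file set.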
        if found_site_count == len(monitored_base_urls):
            break

        remaining_time_hours = max_time_hours - (time.time() - start_time) / 3600.0
        if remaining_time_hours < 0.0:
            print("Waited for maximum amount of time, stopping")
            break

        print(
            f"Waiting {wait_time_in_seconds}s, total wait time remaining {remaining_time_hours:.2f}h"
        )
        time.sleep(wait_time_in_seconds)