# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import pathlib
import urllib.request

from typing import Any, Dict

import worker.blender
import worker.blender.version
import worker.deploy
import worker.utils

checksums = ["md5", "sha256"]


def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
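    """Download builds of the current version from the buildbot download
    pages into the package directory, and write a manifest.json describing
    them.
    """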
    pipeline_category = "daily"
    if builder.track_id == "vexp":
        pipeline_category = "experimental"

    log_path = builder.track_path / "log"
    worker.utils.remove_dir(log_path)
    os.makedirs(log_path, exist_ok=True)

    worker.utils.info("Cleaning package directory")
    worker.utils.remove_dir(builder.package_dir)
    os.makedirs(builder.package_dir, exist_ok=True)
    # Fetch builds information.
    env_base_url = {
        "LOCAL": "https://builder.blender.org",
        "UATEST": "https://builder.uatest.blender.org",
        "PROD": "https://builder.blender.org",
    }
    base_url = env_base_url[builder.service_env_id]

    search_url = f"{base_url}/download/{pipeline_category}?format=json&v=1"
    worker.utils.info(f"Fetching build JSON from [{search_url}]")

    builds_response = urllib.request.urlopen(search_url)
    # TODO -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count
    builds_json = json.load(builds_response)
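
    # Each build entry in the JSON is expected to provide at least: version,
    # file_name, file_extension, platform, architecture, risk_id and url.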
    # Get builds matching our version.
    worker.utils.info("Processing build JSON")
    version_info = worker.blender.version.VersionInfo(builder)

    unique_builds: Dict[Any, Dict[Any, Any]] = {}
    for build in builds_json:
        if build["version"] != version_info.version:
            continue
        if build["file_extension"] in checksums:
            continue

        # Correct incomplete file extension in JSON.
        if build["file_name"].endswith(".tar.xz"):
            build["file_extension"] = "tar.xz"
        elif build["file_name"].endswith(".tar.gz"):
            build["file_extension"] = "tar.gz"
        elif build["file_name"].endswith(".tar.bz2"):
            build["file_extension"] = "tar.bz2"

        key = (build["platform"], build["architecture"], build["file_extension"])
        if key in unique_builds:
            # Prefer more stable builds, to avoid issues when multiple are present.
            risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]
            risk = build["risk_id"]
            risk = (
                risk_id_order.index(risk)
                if risk in risk_id_order
                else len(risk_id_order)
            )
            other_risk = unique_builds[key]["risk_id"]
            other_risk = (
                risk_id_order.index(other_risk)
                if other_risk in risk_id_order
                else len(risk_id_order)
            )
            # Keep the existing build if it is at least as stable.
            if other_risk <= risk:
                continue
        else:
            print(" ".join(key))

        unique_builds[key] = build

    builds = list(unique_builds.values())
    if len(builds) == 0:
        raise Exception(
            f"No builds found for version [{version_info.version}] in [{search_url}]"
        )

    # Download builds.
    worker.utils.remove_dir(builder.download_dir)
    os.makedirs(builder.download_dir, exist_ok=True)

    for build in builds:
        file_uri = build["url"]
        file_name = build["file_name"]
        worker.utils.info(f"Pull [{file_name}]")

        download_file_path = builder.download_dir / file_name

        worker.utils.info(f"Download [{file_uri}]")
        urllib.request.urlretrieve(file_uri, download_file_path)
        # TODO: retry and resume
        # -resume -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count

        # Move to the package directory.
        worker.utils.info(f"Move to [{builder.package_dir}]")
        worker.utils.move(
            download_file_path, builder.package_dir / download_file_path.name
        )

    worker.utils.remove_dir(builder.download_dir)

    # Write manifest of downloaded packages.
    package_manifest = builder.package_dir / "manifest.json"
    package_manifest.write_text(json.dumps(builds, indent=2))


def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
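    """Rename downloaded archives and their top-level folders to the release
    naming scheme, then write md5 and sha256 checksum files.
    """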
    version_info = worker.blender.version.VersionInfo(builder)

    deployable_path = builder.package_dir / "deployable"
    worker.utils.remove_dir(deployable_path)
    os.makedirs(deployable_path, exist_ok=True)
    os.chdir(deployable_path)

    package_manifest = builder.package_dir / "manifest.json"
    builds = json.loads(package_manifest.read_text())

    checksum_file_paths = []

    # Rename the files and the internal folders for zip and tar.xz files.
    for build in builds:
        file_name = build["file_name"]
        file_path = builder.package_dir / file_name
        worker.utils.info(f"Repackaging {file_name}")

        if builder.service_env_id == "PROD" and build["risk_id"] != "stable":
            raise Exception(
                f"Can only repackage and deploy stable versions, found risk id '{build['risk_id']}'"
            )
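
        # Normalize naming for release artifacts: "darwin" is published as
        # "macos", and architecture strings like "x86_64" / "amd64" collapse
        # to "x64".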
version = build["version"]
platform = build["platform"].replace("darwin", "macos")
architecture = build["architecture"].replace("86_", "").replace("amd", "x")
file_extension = build["file_extension"]
current_folder_name = file_path.name[: -len("." + file_extension)]
new_folder_name = f"blender-{version}-{platform}-{architecture}"
new_file_name = f"{new_folder_name}.{file_extension}"
source_file_path = file_path
dest_file_path = deployable_path / new_file_name
worker.utils.info(f"Renaming file [{source_file_path}] to [{dest_file_path}]")
worker.utils.copy_file(source_file_path, dest_file_path)
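
        # Zip archives support renaming the internal folder in place via 7z;
        # tar.xz archives are instead extracted, renamed and re-compressed
        # with root ownership (--owner=0 --group=0).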
        if file_extension == "zip":
            worker.utils.info(f"Renaming internal folder to [{new_folder_name}]")
            worker.utils.call(
                ["7z", "rn", dest_file_path, current_folder_name, new_folder_name]
            )
        elif file_extension == "tar.xz":
            worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]")
            worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."])
            worker.utils.remove_file(dest_file_path)

            worker.utils.move(
                deployable_path / current_folder_name, deployable_path / new_folder_name
            )

            worker.utils.info(f"Compressing [{new_folder_name}] to [{dest_file_path}]")
            cmd = [
                "tar",
                "-cv",
                "--owner=0",
                "--group=0",
                "--use-compress-program",
                "xz -6",
                "-f",
                dest_file_path,
                new_folder_name,
            ]
            worker.utils.call(cmd)

            worker.utils.remove_dir(deployable_path / new_folder_name)

        checksum_file_paths.append(dest_file_path)

    # Create checksums. Note the cwd is deployable_path, so the checksum
    # files list bare file names rather than local paths.
    worker.utils.info("Creating checksums")
    os.chdir(deployable_path)

    for checksum in checksums:
        checksum_text = ""
        for filepath in checksum_file_paths:
            checksum_line = worker.utils.check_output(
                [f"{checksum}sum", filepath.name]
            ).strip()
            checksum_text += checksum_line + "\n"

        print(checksum_text)

        checksum_filepath = (
            deployable_path / f"blender-{version_info.version}.{checksum}"
        )
        checksum_filepath.write_text(checksum_text)


def deploy(builder: worker.deploy.CodeDeployBuilder) -> None:
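    """Rsync the source and binary packages to the download server over SSH.

    Everything outside the LOCAL and PROD environments runs as a dry run.
    """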
    # Not testable on UATEST currently.
    dry_run = builder.service_env_id not in ("LOCAL", "PROD")

    worker_config = builder.get_worker_config()
    connect_id = f"{worker_config.download_user}@{worker_config.download_machine}"

    # Copy source.
    remote_dest_path = pathlib.Path(worker_config.download_source_folder)
    # "F0444" presumably maps to an rsync --chmod rule, making deployed files
    # read-only on the server.
    change_modes = ["F0444"]

    if builder.service_env_id != "PROD":
        # Already assumed to exist on production.
        worker.utils.call_ssh(
            connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
        )

    for source_path in builder.package_source_dir.iterdir():
        dest_path = f"{connect_id}:{remote_dest_path}/"
        worker.utils.info(f"Deploying source package [{source_path}]")
        worker.utils.rsync(
            source_path,
            dest_path,
            change_modes=change_modes,
            show_names=True,
            dry_run=dry_run,
        )

    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )

    # Copy binaries.
    version_info = worker.blender.version.VersionInfo(builder)
    major_minor_version = version_info.short_version
    remote_dest_path = (
        pathlib.Path(worker_config.download_release_folder)
        / f"Blender{major_minor_version}"
    )
    deployable_path = builder.package_dir / "deployable"
    change_modes = ["F0444"]

    worker.utils.call_ssh(
        connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
    )
    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )

    for source_path in deployable_path.iterdir():
        dest_path = f"{connect_id}:{remote_dest_path}/"
        worker.utils.info(f"Deploying binary package [{source_path}]")
        worker.utils.rsync(
            source_path,
            dest_path,
            change_modes=change_modes,
            show_names=True,
            dry_run=dry_run,
        )

    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )