Get back to original

This commit is contained in:
Bart van der Braak 2024-11-20 16:02:13 +01:00
parent 77ae214d24
commit 5cc9d7b0e9
68 changed files with 83 additions and 42 deletions

View file

@ -0,0 +1,41 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import argparse
import pathlib
import worker.blender
import worker.utils
class CodeDeployBuilder(worker.blender.CodeBuilder):
    """Builder for deploy steps: downloading, repackaging and store packaging.

    Sets up the working directories used by the deploy pipeline under the
    track checkout path.
    """

    def __init__(self, args: argparse.Namespace):
        super().__init__(args)
        # Platforms a release deploy covers.
        self.platform_ids = ["linux", "darwin", "windows"]
        self.setup_track_path()

        track_path: pathlib.Path = self.track_path
        # Working directories for the individual deploy/store steps.
        self.download_dir = track_path / "build_download"
        self.package_source_dir = track_path / "build_source"
        self.store_steam_dir = track_path / "build_store_steam"
        self.store_snap_dir = track_path / "build_store_snap"
        self.store_windows_dir = track_path / "build_store_windows"

    def clean(self):
        """Remove all working directories created by deploy steps."""
        worker.utils.remove_dir(self.download_dir)
        worker.utils.remove_dir(self.package_dir)
        worker.utils.remove_dir(self.package_source_dir)
        worker.utils.remove_dir(self.store_steam_dir)
        worker.utils.remove_dir(self.store_snap_dir)
        worker.utils.remove_dir(self.store_windows_dir)

        # Created by make source_archive_complete
        worker.utils.remove_dir(self.track_path / "build_linux")
        worker.utils.remove_dir(self.track_path / "build_darwin")
class CodeStoreBuilder(CodeDeployBuilder):
    """Deploy builder specialized for store delivery (snap/steam/windows)."""

    def __init__(self, args: argparse.Namespace):
        super().__init__(args)
        # Which store this builder targets, taken from the command line.
        self.store_id = args.store_id

View file

@ -0,0 +1,280 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import pathlib
import urllib.request
from typing import Any, Dict
import worker.blender
import worker.blender.version
import worker.deploy
import worker.utils
# Checksum file formats generated and verified for release artifacts.
checksums = ["md5", "sha256"]
def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Download the builds matching this track's version into package_dir.

    Queries the buildbot download JSON, deduplicates per
    (platform, architecture, extension) preferring the most stable risk id,
    downloads each file and writes a manifest.json describing them.

    Raises an Exception when no builds match the current version.
    """
    pipeline_category = "daily"
    if builder.track_id == "vexp":
        pipeline_category = "experimental"

    # Fresh log directory for this run.
    log_path = builder.track_path / "log"
    worker.utils.remove_dir(log_path)
    os.makedirs(log_path, exist_ok=True)

    worker.utils.info("Cleaning package directory")
    worker.utils.remove_dir(builder.package_dir)
    os.makedirs(builder.package_dir, exist_ok=True)

    # Fetch builds information.
    env_base_url = {
        "LOCAL": "https://builder.blender.org",
        "UATEST": "https://builder.uatest.blender.org",
        "PROD": "https://builder.blender.org",
    }
    base_url = env_base_url[builder.service_env_id]
    search_url = f"{base_url}/download/{pipeline_category}?format=json&v=1"

    worker.utils.info(f"Fetching build JSON from [{search_url}]")
    builds_response = urllib.request.urlopen(search_url)
    # TODO -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count
    builds_json = json.load(builds_response)

    # Get builds matching our version.
    worker.utils.info("Processing build JSON")
    version_info = worker.blender.version.VersionInfo(builder)
    unique_builds: Dict[Any, Dict[Any, Any]] = {}
    for build in builds_json:
        if build["version"] != version_info.version:
            continue
        # Skip the .md5/.sha256 companion files, only keep real packages.
        if build["file_extension"] in checksums:
            continue

        # Correct incomplete file extension in JSON.
        if build["file_name"].endswith(".tar.xz"):
            build["file_extension"] = "tar.xz"
        elif build["file_name"].endswith(".tar.gz"):
            build["file_extension"] = "tar.gz"
        elif build["file_name"].endswith(".tar.bz2"):
            build["file_extension"] = "tar.bz2"

        key = (build["platform"], build["architecture"], build["file_extension"])
        if key in unique_builds:
            # Prefer more stable builds, to avoid issue when multiple are present.
            # Lower index means more stable; unknown ids rank last.
            risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]
            risk = build["risk_id"]
            risk = (
                risk_id_order.index(risk)
                if risk in risk_id_order
                else len(risk_id_order)
            )
            other_risk = unique_builds[key]["risk_id"]
            other_risk = (
                risk_id_order.index(other_risk)
                if other_risk in risk_id_order
                else len(risk_id_order)
            )
            # Keep the already-stored build when it is at least as stable.
            if other_risk <= risk:
                continue
        else:
            print(" ".join(key))
        unique_builds[key] = build

    builds = list(unique_builds.values())
    if len(builds) == 0:
        raise Exception(
            f"No builds found for version [{version_info.version}] in [{search_url}]"
        )

    # Download builds.
    worker.utils.remove_dir(builder.download_dir)
    os.makedirs(builder.download_dir, exist_ok=True)
    for build in builds:
        file_uri = build["url"]
        file_name = build["file_name"]
        worker.utils.info(f"Pull [{file_name}]")

        download_file_path = builder.download_dir / file_name
        worker.utils.info(f"Download [{file_uri}]")
        urllib.request.urlretrieve(file_uri, download_file_path)
        # TODO: retry and resume
        # -resume -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count

        # Moving to build_package folder
        worker.utils.info(f"Move to [{builder.package_dir}]")
        worker.utils.move(
            download_file_path, builder.package_dir / download_file_path.name
        )

    worker.utils.remove_dir(builder.download_dir)

    # Write manifest of downloaded packages.
    package_manifest = builder.package_dir / "manifest.json"
    package_manifest.write_text(json.dumps(builds, indent=2))
def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Rename downloaded packages to the official release naming and checksum them.

    Copies each package from the manifest into package_dir/deployable as
    blender-{version}-{platform}-{architecture}.{ext}, renames the top-level
    folder inside zip and tar.xz archives to match, and writes one
    blender-{version}.{md5,sha256} file covering all artifacts.

    Raises an Exception on PROD when a non-stable build is encountered.
    """
    version_info = worker.blender.version.VersionInfo(builder)

    deployable_path = builder.package_dir / "deployable"
    worker.utils.remove_dir(deployable_path)
    os.makedirs(deployable_path, exist_ok=True)
    os.chdir(deployable_path)

    package_manifest = builder.package_dir / "manifest.json"
    builds = json.loads(package_manifest.read_text())
    checksum_file_paths = []

    # Rename the files and the internal folders for zip and tar.xz files
    for build in builds:
        file_name = build["file_name"]
        file_path = builder.package_dir / file_name
        worker.utils.info(f"Repackaging {file_name}")

        if builder.service_env_id == "PROD" and build["risk_id"] != "stable":
            # Bug fix: this message was missing its f-prefix, so the literal
            # placeholder was printed instead of the actual risk id.
            raise Exception(
                f"Can only repackage and deploy stable versions, found risk id '{build['risk_id']}'"
            )

        version = build["version"]
        # Public-facing platform and architecture names.
        platform = build["platform"].replace("darwin", "macos")
        architecture = build["architecture"].replace("86_", "").replace("amd", "x")
        file_extension = build["file_extension"]

        current_folder_name = file_path.name[: -len("." + file_extension)]
        new_folder_name = f"blender-{version}-{platform}-{architecture}"
        new_file_name = f"{new_folder_name}.{file_extension}"

        source_file_path = file_path
        dest_file_path = deployable_path / new_file_name
        worker.utils.info(f"Renaming file [{source_file_path}] to [{dest_file_path}]")
        worker.utils.copy_file(source_file_path, dest_file_path)

        if file_extension == "zip":
            # 7z can rename the archive's top-level folder in place.
            worker.utils.info(f"Renaming internal folder to [{new_folder_name}]")
            worker.utils.call(
                ["7z", "rn", dest_file_path, current_folder_name, new_folder_name]
            )
        elif file_extension == "tar.xz":
            # tar has no in-place rename: extract, rename the folder, re-compress.
            worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]")
            worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."])
            worker.utils.remove_file(dest_file_path)
            worker.utils.move(
                deployable_path / current_folder_name, deployable_path / new_folder_name
            )
            worker.utils.info(f"Compressing [{new_folder_name}] to [{dest_file_path}]")
            # owner/group 0 for reproducible, root-owned archive entries.
            cmd = [
                "tar",
                "-cv",
                "--owner=0",
                "--group=0",
                "--use-compress-program",
                "xz -6",
                "-f",
                dest_file_path,
                new_folder_name,
            ]
            worker.utils.call(cmd)
            worker.utils.remove_dir(deployable_path / new_folder_name)

        checksum_file_paths.append(dest_file_path)

    # Create checksums
    worker.utils.info("Creating checksums")
    os.chdir(deployable_path)

    for checksum in checksums:
        checksum_text = ""
        for filepath in checksum_file_paths:
            # Run e.g. md5sum/sha256sum on the bare file name so the checksum
            # file lists relative names.
            checksum_line = worker.utils.check_output(
                [f"{checksum}sum", filepath.name]
            ).strip()
            checksum_text += checksum_line + "\n"

        print(checksum_text)
        checksum_filepath = (
            deployable_path / f"blender-{version_info.version}.{checksum}"
        )
        checksum_filepath.write_text(checksum_text)
def deploy(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Rsync source and binary packages to the download server over SSH.

    Runs in dry-run mode on every environment except LOCAL and PROD.
    """
    # No testable on UATEST currently.
    dry_run = builder.service_env_id not in ("LOCAL", "PROD")

    worker_config = builder.get_worker_config()
    connect_id = f"{worker_config.download_user}@{worker_config.download_machine}"

    # Copy source
    remote_dest_path = pathlib.Path(worker_config.download_source_folder)
    # Deployed files become read-only (mode 0444).
    change_modes = ["F0444"]

    if builder.service_env_id != "PROD":
        # Already assumed to exist on production
        worker.utils.call_ssh(
            connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
        )

    for source_path in builder.package_source_dir.iterdir():
        dest_path = f"{connect_id}:{remote_dest_path}/"
        worker.utils.info(f"Deploying source package [{source_path}]")
        worker.utils.rsync(
            source_path,
            dest_path,
            change_modes=change_modes,
            show_names=True,
            dry_run=dry_run,
        )

    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )

    # Copy binaries
    version_info = worker.blender.version.VersionInfo(builder)
    major_minor_version = version_info.short_version
    remote_dest_path = (
        pathlib.Path(worker_config.download_release_folder)
        / f"Blender{major_minor_version}"
    )
    deployable_path = builder.package_dir / "deployable"
    change_modes = ["F0444"]

    worker.utils.call_ssh(
        connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
    )
    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )

    for source_path in deployable_path.iterdir():
        dest_path = f"{connect_id}:{remote_dest_path}/"
        worker.utils.info(f"Deploying binary package [{source_path}]")
        worker.utils.rsync(
            source_path,
            dest_path,
            change_modes=change_modes,
            show_names=True,
            dry_run=dry_run,
        )

    worker.utils.call_ssh(
        connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
    )

View file

@ -0,0 +1,116 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import re
import time
import urllib.request
import worker.blender.version
import worker.deploy.artifacts
import worker.deploy
import worker.utils
def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Poll the release mirrors until they all carry every expected file.

    Every 2 minutes each mirror's release folder is checked for the expected
    number of artifacts for this version. Stops when all mirrors are complete
    or after a 4 hour budget is exhausted.
    """
    wait_time_in_seconds = 120
    start_time = time.time()
    max_time_hours = 4.0

    version_info = worker.blender.version.VersionInfo(builder)

    required_base_url = "https://mirror.clarkson.edu/blender/release"
    monitored_base_urls = [
        "https://download.blender.org/release",
        "https://ftp.nluug.nl/pub/graphics/blender/release",
        "https://ftp.halifax.rwth-aachen.de/blender/release",
        "https://mirrors.dotsrc.org/blender/blender-release",
        "https://mirrors.ocf.berkeley.edu/blender/release",
        "https://mirrors.iu13.net/blender/release",
        "https://mirrors.aliyun.com/blender/release",
        "https://mirrors.sahilister.in/blender/release",
        "https://mirror.freedif.org/blender/release",
        required_base_url,
    ]
    stop_on_required_site_found = False

    # Expected artifact count: one per platform (three for Windows:
    # msi, msix, zip) plus the checksum files.
    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
    expected_file_count = len(worker.deploy.artifacts.checksums)
    for expected_platform in expected_platforms:
        if expected_platform.startswith("windows"):
            expected_file_count += 3  # msi, msix, zip
        else:
            expected_file_count += 1

    folder_name = f"Blender{version_info.short_version}"
    file_pattern = rf"[Bb]lender-{version_info.version}[\.\-\_a-zA-Z0-9]*"

    while True:
        found_site_count = 0
        print("=" * 80)

        # Assume no files are missing
        sites_missing_files_count = 0

        for base_url in monitored_base_urls:
            search_url = f"{base_url}/{folder_name}"
            print(f"Checking [{search_url}] for version [{version_info.version}]")

            # Header to avoid getting permission denied.
            request = urllib.request.Request(
                search_url, headers={"User-Agent": "Mozilla"}
            )
            try:
                response = urllib.request.urlopen(request, timeout=5.0)
                text = response.read().decode("utf-8", "ignore")
            except Exception as e:
                # Treat any fetch error as "no files found on this mirror".
                print(e)
                text = ""

            matches = set(re.findall(file_pattern, text))
            # Removed dead statement: a bare `len(matches)` had no effect here.
            for match in matches:
                print(f"File [{match}]")

            if len(matches) == expected_file_count:
                found_site_count += 1
            elif len(matches) > 0:
                sites_missing_files_count += 1

            print("-" * 80)

            can_stop_monitoring = (
                (len(matches) == expected_file_count)
                and (base_url == required_base_url)
                and (sites_missing_files_count == 0)
            )
            # NOTE: stop_on_required_site_found is currently False, so this
            # early exit is disabled.
            if stop_on_required_site_found and can_stop_monitoring:
                print(f"Required site found [{required_base_url}], stopping")
                return

        print("")
        print("=" * 80)
        print(
            f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files"
        )
        print("=" * 80)

        if found_site_count == len(monitored_base_urls):
            break

        remaining_time_hours = max_time_hours - (time.time() - start_time) / 3600.0
        if remaining_time_hours < 0.0:
            print("Waited for maximum amount of time, stopping")
            break

        print(
            f"Waiting {wait_time_in_seconds}s, total wait time remaining {remaining_time_hours:.2f}h"
        )
        time.sleep(wait_time_in_seconds)

View file

@ -0,0 +1,107 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import urllib.request
import zipfile
import worker.blender
import worker.blender.version
import worker.deploy
import worker.utils
def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Download the stable bpy zip builds for this version and unpack the wheels.

    Fetches the bpy build listing from the buildbot, validates that every
    matching build is stable and that the number of builds matches the
    official platform list, then downloads and extracts each zip into
    package_dir.
    """
    version_info = worker.blender.version.VersionInfo(builder)

    worker.utils.info("Cleaning package and download directory")
    worker.utils.remove_dir(builder.package_dir)
    worker.utils.remove_dir(builder.download_dir)
    os.makedirs(builder.package_dir, exist_ok=True)
    os.makedirs(builder.download_dir, exist_ok=True)

    # Fetch builds information.
    env_base_url = {
        "LOCAL": "https://builder.blender.org",
        "UATEST": "https://builder.uatest.blender.org",
        "PROD": "https://builder.blender.org",
    }
    base_url = env_base_url[builder.service_env_id]
    search_url = f"{base_url}/download/bpy/?format=json&v=1"

    worker.utils.info(f"Fetching build JSON from [{search_url}]")
    builds_response = urllib.request.urlopen(search_url)
    builds_json = json.load(builds_response)

    # Get builds matching our version.
    worker.utils.info("Processing build JSON")
    matching_builds = []
    for build in builds_json:
        if build["version"] != version_info.version:
            continue
        if not build["file_name"].endswith(".zip"):
            continue
        worker.utils.info(f"Found {build['file_name']}")
        if build["risk_id"] != "stable":
            # Bug fix: message previously read "Can not only deploy stable
            # releases", which said the opposite of what is meant.
            raise Exception("Can only deploy stable releases")
        matching_builds.append(build)

    # Check expected platforms
    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
    if len(expected_platforms) != len(matching_builds):
        platform_names = "\n".join(expected_platforms)
        raise Exception("Unexpected number of builds, expected:\n" + platform_names)

    # Download builds.
    for build in matching_builds:
        file_uri = build["url"]
        file_name = build["file_name"]
        worker.utils.info(f"Download [{file_uri}]")
        download_file_path = builder.download_dir / file_name
        urllib.request.urlretrieve(file_uri, download_file_path)

        # Unzip.
        with zipfile.ZipFile(download_file_path, "r") as zipf:
            zipf.extractall(path=builder.package_dir)

    worker.utils.remove_dir(builder.download_dir)
def deliver(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Upload the bpy wheels in package_dir to PyPI using twine.

    Only PROD performs a real upload; all other environments dry-run.
    Raises an Exception when the wheel count does not match the official
    platform list.
    """
    dry_run = builder.service_env_id != "PROD"
    wheels = list(builder.package_dir.glob("*.whl"))

    # Check expected platforms
    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
    wheel_names = "\n".join([wheel.name for wheel in wheels])
    wheel_paths = [str(wheel) for wheel in wheels]
    print(wheel_names)
    if len(expected_platforms) != len(wheels):
        raise Exception("Unexpected number of wheels:\n" + wheel_names)

    # Check wheels
    cmd = ["twine", "check"] + wheel_paths
    worker.utils.call(cmd)

    # Upload
    worker_config = builder.get_worker_config()
    env = os.environ.copy()
    # Token-based PyPI auth: user is the literal "__token__".
    env["TWINE_USERNAME"] = "__token__"
    env["TWINE_PASSWORD"] = worker_config.pypi_token(builder.service_env_id)
    env["TWINE_REPOSITORY_URL"] = "https://upload.pypi.org/legacy/"
    cmd = ["twine", "upload", "--verbose", "--non-interactive"] + wheel_paths
    worker.utils.call(cmd, env=env, dry_run=dry_run)

View file

@ -0,0 +1,175 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import worker.blender.version
import worker.deploy
import worker.utils
def package(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Build a Snap package from the Linux tar.xz listed in the manifest.

    Fills in the snapcraft template from the Blender source tree, stages the
    icon and wrapper script, and runs snapcraft + review-tools. Dry-runs on
    LOCAL when not on Linux with snapcraft installed.
    """
    dry_run = False
    if builder.service_env_id == "LOCAL" and not (
        builder.platform == "linux" and worker.utils.is_tool("snapcraft")
    ):
        worker.utils.warning("Performing dry run on LOCAL service environment")
        dry_run = True
    elif not builder.platform == "linux":
        raise Exception("Can only run snapcraft on Linux, aborting")

    version_info = worker.blender.version.VersionInfo(builder)
    # Snap grade: "stable" only for candidate/stable risk builds.
    needs_stable_grade = version_info.risk_id in ["candidate", "stable"]
    grade = "stable" if needs_stable_grade else "devel"

    # Clean directory
    for old_package_file in builder.store_snap_dir.glob("*.tar.xz"):
        worker.utils.remove_file(old_package_file)
    os.makedirs(builder.store_snap_dir, exist_ok=True)

    # Get input package file path
    package_manifest = builder.package_dir / "manifest.json"
    builds = json.loads(package_manifest.read_text())
    linux_package_file_path = None
    for build in builds:
        if build["platform"] == "linux" and build["file_extension"] == "tar.xz":
            linux_package_file_path = builder.package_dir / build["file_name"]
            break
    if not linux_package_file_path:
        raise Exception(f"Linux package not found in [{builder.package_dir}] manifest")

    source_file_path = linux_package_file_path
    dest_file_path = builder.store_snap_dir / linux_package_file_path.name
    worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]")
    worker.utils.copy_file(source_file_path, dest_file_path)

    freedesktop_path = builder.code_path / "release" / "freedesktop"
    snap_source_root_path = freedesktop_path / "snap"
    blender_icon_file_name = "blender.svg"
    snapcraft_template_file_path = (
        snap_source_root_path / "blender-snapcraft-template.yaml"
    )
    worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]")

    # Substitute the template placeholders.
    snapcraft_text = snapcraft_template_file_path.read_text()
    snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version)
    snapcraft_text = snapcraft_text.replace("@GRADE@", grade)
    snapcraft_text = snapcraft_text.replace(
        "@ICON_PATH@", f"./{blender_icon_file_name}"
    )
    snapcraft_text = snapcraft_text.replace(
        "@PACKAGE_PATH@", f"./{linux_package_file_path.name}"
    )

    snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml"
    worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]")
    snapcraft_file_path.write_text(snapcraft_text)
    print(snapcraft_text)

    snap_package_file_name = f"blender_{version_info.version}_amd64.snap"
    snap_package_file_path = builder.store_snap_dir / snap_package_file_name
    if snap_package_file_path.exists():
        worker.utils.info(f"Clearing snap file [{snap_package_file_path}]")
        worker.utils.remove_file(snap_package_file_path)

    # snapcraft expects to run from the folder with snapcraft.yaml.
    os.chdir(builder.store_snap_dir)

    # Copy all required files into working folder
    source_file_path = (
        freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name
    )
    dest_file_path = builder.store_snap_dir / "blender.svg"
    worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]")
    worker.utils.copy_file(source_file_path, dest_file_path)

    source_file_path = snap_source_root_path / "blender-wrapper"
    dest_file_path = builder.store_snap_dir / "blender-wrapper"
    worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]")
    worker.utils.copy_file(source_file_path, dest_file_path)

    worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run)
    worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run)
    worker.utils.call(
        ["review-tools.snap-review", snap_package_file_path, "--allow-classic"],
        dry_run=dry_run,
    )

    if dry_run:
        # Leave a placeholder so later steps still find a file.
        snap_package_file_path.write_text("Dry run dummy package file")

    worker.utils.info("To test the snap package run this command")
    print("sudo snap remove blender")
    print(f"sudo snap install --dangerous --classic {snap_package_file_path}")
def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Upload the built snap to the Snap Store and release it to a channel.

    The channel is {track}/{risk}, where stable risk is downgraded to
    candidate and alpha to edge; LTS tracks get an "lts" suffix and the
    latest track maps to "latest". Dry-runs on LOCAL.
    """
    dry_run = False
    if builder.service_env_id == "LOCAL":
        worker.utils.warning("Performing dry run on LOCAL service environment")
        dry_run = True
    elif not builder.platform == "linux":
        raise Exception("Can only run snapcraft on Linux, aborting")

    version_info = worker.blender.version.VersionInfo(builder)
    branches_config = builder.get_branches_config()
    is_lts = builder.track_id in branches_config.all_lts_tracks
    # NOTE(review): steam delivery indexes track_major_minor_versions with
    # "vdev" for this check; confirm using builder.track_id here is intended.
    is_latest = (
        branches_config.track_major_minor_versions[builder.track_id]
        == version_info.short_version
    )

    # Never push to stable
    snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace(
        "alpha", "edge"
    )
    # Defensive: after the replacements above this should be unreachable.
    if snap_risk_id == "stable":
        raise Exception("Delivery to [stable] channel not allowed")

    snap_track_id = version_info.short_version
    if is_lts:
        snap_track_id += "lts"
        needs_release = True
    elif is_latest:
        # latest/edge always vdev
        snap_track_id = "latest"
        needs_release = True
    else:
        # Push current release under development to beta or candidate
        needs_release = True

    # worker.utils.call(["snapcraft", "list-tracks", "blender"], dry_run=dry_run)

    snap_package_file_name = f"blender_{version_info.version}_amd64.snap"
    snap_package_file_path = builder.store_snap_dir / snap_package_file_name
    if not snap_package_file_path.exists():
        raise Exception(f"Snap file [{snap_package_file_path}] missing")

    worker_config = builder.get_worker_config()
    env = os.environ.copy()
    env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(
        builder.service_env_id
    )

    # If this fails, then the permissions were not set correctly with acls
    worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env)

    if needs_release:
        # Upload and release.
        snap_channel = f"{snap_track_id}/{snap_risk_id}"
        cmd = ["snapcraft", "upload", "--release", snap_channel, snap_package_file_path]
    else:
        # Upload only.
        snap_channel = ""
        cmd = ["snapcraft", "upload", snap_package_file_path]

    # Some api call is making this fail, seems to be status based as we can upload and set channel
    worker.utils.call(cmd, retry_count=5, retry_wait_time=120, dry_run=dry_run, env=env)

    if needs_release:
        worker.utils.info("To test the snap package run this command")
        print(f"sudo snap refresh blender --classic --channel {snap_channel}")

View file

@ -0,0 +1,40 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import os
import worker.blender.version
import worker.deploy
import worker.utils
def _package(
    builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False
) -> None:
    """Run the make source-archive target and collect the resulting tarballs.

    With needs_complete the archive also bundles library dependencies
    (make source_archive_complete).
    """
    os.chdir(builder.code_path)

    target = "source_archive_complete" if needs_complete else "source_archive"
    worker.utils.call(["make", target])

    # The make change scripts writes to a different location since 2.83, so
    # sweep archives from both the code and track checkouts.
    for search_root in (builder.code_path, builder.track_path):
        for archive_path in search_root.glob("blender-*.tar.xz*"):
            worker.utils.move(
                archive_path, builder.package_source_dir / archive_path.name
            )
def package(builder: worker.deploy.CodeDeployBuilder) -> None:
    """Create the source archive, plus the complete one for X.Y.0 releases."""
    print(f"Cleaning path [{builder.package_source_dir}]")
    worker.utils.remove_dir(builder.package_source_dir)
    os.makedirs(builder.package_source_dir, exist_ok=True)

    # Regular source archive is always produced.
    _package(builder, needs_complete=False)

    # The complete archive (with libraries) only accompanies X.Y.0 releases.
    version_info = worker.blender.version.VersionInfo(builder)
    if version_info.patch == 0:
        _package(builder, needs_complete=True)
    else:
        worker.utils.info("Skipping complete source package for patch release")

View file

@ -0,0 +1,271 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import pathlib
import time
import worker.blender.version
import worker.deploy
import worker.utils
def extract_file(
    builder: worker.deploy.CodeStoreBuilder,
    source_file_path: pathlib.Path,
    platform: str,
) -> None:
    """Extract one downloaded build into store_steam_dir/<platform>/content.

    Handles tar.xz (linux), dmg (darwin) and zip (windows) inputs; raises an
    Exception for a missing file or an unknown platform.
    """
    worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam")
    if not source_file_path.exists():
        raise Exception("File not found, aborting")

    dest_extract_path = builder.store_steam_dir / platform
    dest_content_path = dest_extract_path / "content"
    worker.utils.remove_dir(dest_extract_path)
    worker.utils.remove_dir(dest_content_path)
    os.makedirs(dest_extract_path, exist_ok=True)

    if platform == "linux":
        worker.utils.info(f"Extract [{source_file_path}] to [{dest_extract_path}]")
        cmd = ["tar", "-xf", source_file_path, "--directory", dest_extract_path]
        worker.utils.call(cmd)

        # Move any folder there as ./content
        for source_content_path in dest_extract_path.iterdir():
            if source_content_path.is_dir():
                worker.utils.info(
                    f"Move [{source_content_path.name}] -> [{dest_content_path}]"
                )
                worker.utils.move(source_content_path, dest_content_path)
                break
    elif platform == "darwin":
        source_content_path = dest_extract_path / "Blender"
        if source_content_path.exists():
            worker.utils.info(f"Removing [{source_content_path}]")
            worker.utils.remove_dir(source_content_path)

        # Convert the dmg to a raw image that 7z can unpack.
        image_file_path = source_file_path.with_suffix(".img")
        cmd = ["dmg2img", "-v", "-i", source_file_path, "-o", image_file_path]
        worker.utils.call(cmd)

        cmd = ["7z", "x", f"-o{dest_extract_path}", image_file_path]
        worker.utils.call(cmd)

        os.makedirs(dest_content_path, exist_ok=True)
        worker.utils.remove_file(image_file_path)

        worker.utils.info(
            f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]"
        )
        worker.utils.move(
            source_content_path / "Blender.app", dest_content_path / "Blender.app"
        )
        worker.utils.remove_dir(source_content_path)
    elif platform == "windows":
        worker.utils.info(f"Extracting zip file [{source_file_path}]")
        cmd = ["7z", "x", f"-o{dest_extract_path}", source_file_path]
        worker.utils.call(cmd)

        # Move any folder there as ./content
        for source_content_path in dest_extract_path.iterdir():
            if source_content_path.is_dir():
                worker.utils.info(
                    f"Move [{source_content_path.name}] -> [{dest_content_path}]"
                )
                worker.utils.move(source_content_path, dest_content_path)
                break
    else:
        raise Exception(f"Don't know how to extract for platform [{platform}]")
def extract(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Extract every non-arm64 zip/tar.xz/dmg build listed in the manifest."""
    manifest_path = builder.package_dir / "manifest.json"
    for build in json.loads(manifest_path.read_text()):
        supported = build["file_extension"] in ("zip", "tar.xz", "dmg")
        if not supported or build["architecture"] == "arm64":
            continue
        extract_file(
            builder, builder.package_dir / build["file_name"], build["platform"]
        )
def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None:
    """Run a steamcmd app build for every platform depot.

    Generates app_build.vdf and depot_build.vdf from the extracted content
    folders and runs steamcmd per platform. With is_preview the build is not
    committed to steampipe. Dry-runs on LOCAL.
    """
    dry_run = False
    if builder.service_env_id == "LOCAL":
        worker.utils.warning("Performing dry run on LOCAL service environment")
        dry_run = True

    version_info = worker.blender.version.VersionInfo(builder)
    branches_config = builder.get_branches_config()
    is_lts = builder.track_id in branches_config.all_lts_tracks
    is_latest = (
        branches_config.track_major_minor_versions["vdev"] == version_info.short_version
    )

    log_path = builder.track_path / "log"
    worker.utils.remove_dir(log_path)
    os.makedirs(log_path, exist_ok=True)

    worker_config = builder.get_worker_config()
    steam_credentials = worker_config.steam_credentials(builder.service_env_id)
    steam_user_id, steam_user_password = steam_credentials
    if not steam_user_id or not steam_user_password:
        if not dry_run:
            raise Exception("Steam user id or password not available, aborting")

    env = os.environ.copy()
    # steamcmd from the distro package lives in /usr/games.
    env["PATH"] = env["PATH"] + os.pathsep + "/usr/games"

    # Log in once first so the later build calls have a cached session.
    cmd: worker.utils.CmdSequence = [
        "steamcmd",
        "+login",
        worker.utils.HiddenArgument(steam_user_id),
        worker.utils.HiddenArgument(steam_user_password),
        "+quit",
    ]
    worker.utils.call(cmd, dry_run=dry_run, env=env)

    worker.utils.info("Waiting 5 seconds for next steam command")
    time.sleep(5.0)

    steam_app_id = worker_config.steam_app_id
    steam_platform_depot_ids = worker_config.steam_platform_depot_ids

    for platform_id in ["linux", "darwin", "windows"]:
        worker.utils.info(f"Platform {platform_id}")

        platform_depot_id = steam_platform_depot_ids[platform_id]

        track_build_root_path = builder.store_steam_dir / platform_id
        if not track_build_root_path.exists():
            raise Exception(f"Folder {track_build_root_path} does not exist")

        platform_build_file_path = track_build_root_path / "depot_build.vdf"
        source_root_path = track_build_root_path / "content"
        if not source_root_path.exists():
            raise Exception(f"Folder {source_root_path} does not exist")
        dest_root_path = track_build_root_path / "output"

        # Steam branches cannot be upper case and no spaces allowed
        # Branches are named "daily" and "devtest" on Steam, so rename those.
        steam_branch_id = builder.service_env_id.lower()
        steam_branch_id = steam_branch_id.replace("prod", "daily")
        steam_branch_id = steam_branch_id.replace("uatest", "devtest")

        if is_lts:
            # daily-X.X and devtest-X.X branches for LTS.
            steam_branch_id = f"{steam_branch_id}-{version_info.short_version}"
        elif is_latest:
            # daily and devtest branches for main without suffix.
            pass
        else:
            # Not setting this live.
            steam_branch_id = ""

        preview = "1" if is_preview else "0"

        app_build_script = f"""
"appbuild"
{{
"appid" "{steam_app_id}"
"desc" "Blender {version_info.version}" // description for this build
"buildoutput" "{dest_root_path}" // build output folder for .log, .csm & .csd files, relative to location of this file
"contentroot" "{source_root_path}" // root content folder, relative to location of this file
"setlive" "{steam_branch_id}" // branch to set live after successful build, non if empty
"preview" "{preview}" // 1 to enable preview builds, 0 to commit build to steampipe
"local" "" // set to file path of local content server
"depots"
{{
"{platform_depot_id}" "{platform_build_file_path}"
}}
}}
"""

        platform_build_script = f"""
"DepotBuildConfig"
{{
// Set your assigned depot ID here
"DepotID" "{platform_depot_id}"
// Set a root for all content.
// All relative paths specified below (LocalPath in FileMapping entries, and FileExclusion paths)
// will be resolved relative to this root.
// If you don't define ContentRoot, then it will be assumed to be
// the location of this script file, which probably isn't what you want
"ContentRoot" "{source_root_path}"
// include all files recursivley
"FileMapping"
{{
// This can be a full path, or a path relative to ContentRoot
"LocalPath" "*"
// This is a path relative to the install folder of your game
"DepotPath" "."
// If LocalPath contains wildcards, setting this means that all
// matching files within subdirectories of LocalPath will also
// be included.
"recursive" "1"
}}
// but exclude all symbol files
// This can be a full path, or a path relative to ContentRoot
//"FileExclusion" "*.pdb"
}}
"""

        (track_build_root_path / "app_build.vdf").write_text(app_build_script)
        platform_build_file_path.write_text(platform_build_script)

        worker.utils.info(
            f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] for steam branch [{steam_branch_id}], building"
        )
        cmd = [
            "steamcmd",
            "+login",
            worker.utils.HiddenArgument(steam_user_id),
            worker.utils.HiddenArgument(steam_user_password),
            "+run_app_build",
            track_build_root_path / "app_build.vdf",
            "+quit",
        ]
        # Bug fix: this previously tested `preview`, which is the string
        # "0"/"1" and therefore always truthy, so retry_count was always 0.
        # Test the boolean instead: no retries for preview builds, 3 otherwise.
        retry_count = 0 if is_preview else 3
        worker.utils.call(
            cmd, retry_count=retry_count, retry_wait_time=120, dry_run=dry_run, env=env
        )

        worker.utils.info("Waiting 5 seconds for next steam command")
        time.sleep(5.0)

        worker.utils.info(
            f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] is done, success"
        )
def package(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Prepare Steam content from the downloaded builds and run a preview build."""
    store_dir = builder.store_steam_dir
    worker.utils.remove_dir(store_dir)
    os.makedirs(store_dir, exist_ok=True)

    # Extract and prepare content, then validate it with a preview-only build.
    extract(builder)
    build(builder, is_preview=True)
def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Run the real (non-preview) Steam build, committing it to steampipe."""
    # This will push to the store
    build(builder, is_preview=False)

View file

@ -0,0 +1,122 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import worker.blender.pack
import worker.blender.sign
import worker.blender.version
import worker.blender.msix_package
import worker.deploy
import worker.utils
def _package_architecture(
    builder: worker.deploy.CodeStoreBuilder, architecture: str, dry_run: bool
) -> None:
    """Build the MS Store MSIX package for one Windows architecture.

    Finds the matching zip in the manifest, stages the msix packaging files
    from the source tree, builds and (optionally) self-signs the package,
    then copies it into package_dir with a hash file.
    """
    version_info = worker.blender.version.VersionInfo(builder)

    # Revision with MS Store must be set to 0
    revision_id = 0

    branches_config = builder.get_branches_config()
    is_lts = builder.track_id in branches_config.windows_store_lts_tracks

    base_build_number = 0
    build_number = version_info.patch + base_build_number
    worker.utils.info(f"Builder number {build_number}")

    # Store version format: major.minor.build.revision.
    store_version_id = f"{version_info.short_version}.{build_number}.{revision_id}"
    worker.utils.info(f"Store version ID {store_version_id}")

    worker.utils.info(f"Cleaning path [{builder.store_windows_dir}]")
    worker.utils.remove_dir(builder.store_windows_dir)
    os.makedirs(builder.store_windows_dir, exist_ok=True)
    os.chdir(builder.store_windows_dir)

    # Find input zip package.
    package_manifest = builder.package_dir / "manifest.json"
    builds = json.loads(package_manifest.read_text())
    input_file_path = None
    for build in builds:
        if (
            build["platform"] == "windows"
            and build["file_extension"] == "zip"
            and build["architecture"] == architecture
        ):
            input_file_path = builder.package_dir / build["file_name"]
            break
    if not input_file_path:
        raise Exception(
            f"Windows package not found in [{builder.package_dir}] manifest"
        )

    # Copy all required files into working folder
    source_path = builder.code_path / "release" / "windows" / "msix"
    dest_path = builder.store_windows_dir
    worker.utils.info(
        f"Copying [{source_path}] -> [{dest_path}] for windows store packaging"
    )
    for source_file in source_path.iterdir():
        if source_file.name == "README.md":
            continue
        if source_file.is_dir():
            worker.utils.copy_dir(source_file, dest_path / source_file.name)
        else:
            worker.utils.copy_file(source_file, dest_path / source_file.name)

    worker_config = builder.get_worker_config()
    cert_subject = worker_config.windows_store_certificate(builder.service_env_id)
    certificate_id = f"CN={cert_subject}"

    msix_filepath = worker.blender.msix_package.pack(
        store_version_id, input_file_path, certificate_id, lts=is_lts, dry_run=dry_run
    )

    # Only self-sign when configured; the store otherwise signs the package.
    if worker_config.windows_store_self_sign:
        worker.blender.sign.sign_windows_files(
            builder.service_env_id, [msix_filepath], certificate_id=certificate_id
        )

    if dry_run:
        msix_filepath.write_text("Dry run dummy package file")

    # Clear out all msix files first
    for old_msix_filepath in builder.package_dir.glob("*.msix"):
        worker.utils.remove_file(old_msix_filepath)

    dest_path = builder.package_dir / msix_filepath.name
    worker.utils.info(f"Copying [{msix_filepath}] -> [{dest_path}] for distribution")
    worker.utils.copy_file(msix_filepath, dest_path)
    worker.blender.pack.generate_file_hash(dest_path)
def package(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Create MS Store MSIX packages for every official Windows architecture.

    Must run on Windows; on LOCAL a non-Windows host downgrades to a dry run.
    """
    dry_run = False
    if builder.platform != "windows":
        if builder.service_env_id != "LOCAL":
            raise Exception("Can only run this on Windows, aborting")
        worker.utils.warning("Performing dry run on LOCAL service environment")
        dry_run = True

    branches_config = builder.get_branches_config()
    official_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]

    # Package each windows-<architecture> entry, e.g. "windows-amd64".
    for platform_architecture in official_platforms:
        if not platform_architecture.startswith("windows"):
            continue
        architecture = platform_architecture.split("-")[1]
        _package_architecture(builder, architecture, dry_run)
def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
    """Windows Store delivery placeholder; upload is done outside this pipeline."""
    worker.utils.info("Windows store delivery not implemented")