Add back further changes from blender-devops

Bart van der Braak 2024-11-19 21:41:39 +01:00
parent 18e653fd2e
commit 0a1454d250
61 changed files with 7917 additions and 1 deletion


@@ -0,0 +1,185 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import argparse
import os
import pathlib
import re
import subprocess
from collections import OrderedDict
from typing import Callable, Any
import worker.utils
class CodeBuilder(worker.utils.Builder):
def __init__(self, args: argparse.Namespace):
super().__init__(args, "blender", "blender")
self.needs_full_clean = args.needs_full_clean
self.needs_gpu_binaries = args.needs_gpu_binaries
self.needs_gpu_tests = args.needs_gpu_tests
self.needs_ninja = True
self.python_module = args.python_module
self.build_configuration = args.build_configuration
track_path: pathlib.Path = self.track_path
if self.platform in {"darwin", "windows"}:
if len(args.architecture):
self.architecture = args.architecture
if self.platform == "darwin":
self.build_dir = track_path / f"build_{self.architecture}_{self.build_configuration}"
else:
self.build_dir = track_path / f"build_{self.build_configuration}"
self.blender_dir = track_path / "blender.git"
self.install_dir = track_path / f"install_{self.build_configuration}"
self.package_dir = track_path / "build_package"
self.build_doc_path = track_path / "build_doc_api"
def clean(self):
worker.utils.remove_dir(self.install_dir)
worker.utils.remove_dir(self.package_dir)
worker.utils.remove_dir(self.build_doc_path)
# Call command within the compiler environment.
def call(self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None) -> int:
cmd_prefix: worker.utils.CmdList = []
if self.platform == "darwin":
# On macOS, override Xcode version if requested.
pipeline_config = self.pipeline_config()
xcode = pipeline_config.get("xcode", None)
xcode_version = xcode.get("version", None) if xcode else None
if xcode_version:
developer_dir = f"/Applications/Xcode-{xcode_version}.app/Contents/Developer"
else:
developer_dir = "/Applications/Xcode.app/Contents/Developer"
if self.service_env_id == "LOCAL" and not pathlib.Path(developer_dir).exists():
worker.utils.warning(
f"Skip using non-existent {developer_dir} in LOCAL service environment"
)
else:
cmd_prefix = ["xcrun"]
env = dict(env) if env else os.environ.copy()
env["DEVELOPER_DIR"] = developer_dir
elif worker.utils.is_tool("scl"):
pipeline_config = self.pipeline_config()
gcc_version = pipeline_config["gcc"]["version"]
gcc_major_version = gcc_version.split(".")[0]
# On Rocky
if os.path.exists("/etc/rocky-release"):
# Stub to override configured GCC version, remove when blender build config is fixed
gcc_major_version = "11"
cmd_prefix = ["scl", "enable", f"gcc-toolset-{gcc_major_version}", "--"]
return worker.utils.call(cmd_prefix + list(cmd), env=env)
def pipeline_config(self) -> dict:
config_file_path = self.code_path / "build_files" / "config" / "pipeline_config.json"
if not config_file_path.exists():
config_file_path = config_file_path.with_suffix(".yaml")
if not config_file_path.exists():
raise Exception(f"Config file [{config_file_path}] not found, aborting")
with open(config_file_path, "r") as read_file:
if config_file_path.suffix == ".json":
import json
pipeline_config = json.load(read_file)
else:
import yaml
pipeline_config = yaml.load(read_file, Loader=yaml.SafeLoader)
return pipeline_config["buildbot"]
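# Illustrative (hypothetical) shape of the returned "buildbot" mapping, based
# on the keys these workers access elsewhere:
#   {"gcc": {"version": "11.2"},
#    "xcode": {"version": "15.4"},
#    "cmake": {"default": {"overrides": {...}},
#              "linux-x86_64": {"overrides": {...}}}}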
def blender_command_path(self) -> pathlib.Path:
if self.platform == "darwin":
return self.install_dir / "Blender.app" / "Contents" / "macOS" / "Blender"
elif self.platform == "windows":
return self.install_dir / "blender.exe"
else:
return self.install_dir / "blender"
def setup_build_environment(self) -> None:
if self.platform != "windows":
return
# CMake goes first to avoid using Chocolatey's cpack command.
worker.utils.info("Setting CMake path")
os.environ["PATH"] = "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"]
worker.utils.info("Setting VC Tools env variables")
windows_build_version = "10.0.19041.0"
os.environ["PATH"] = (
f"C:\\Program Files (x86)\\Windows Kits\\10\\bin\\{windows_build_version}\\x64"
+ os.pathsep
+ os.environ["PATH"]
)
os.environ["PATH"] = (
"C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + os.pathsep + os.environ["PATH"]
)
if self.architecture == "arm64":
vs_build_tool_path = pathlib.Path(
"C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\VC\\Auxiliary\\Build\\vcvarsarm64.bat"
)
vs_tool_install_dir_suffix = "\\bin\\Hostarm64\\arm64"
else:
vs_build_tool_path = pathlib.Path(
"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\Build\\vcvars64.bat"
)
vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64"
vcvars_output = subprocess.check_output([vs_build_tool_path, "&&", "set"], shell=True)
vcvars_text = vcvars_output.decode("utf-8", "ignore")
for line in vcvars_text.splitlines():
match = re.match(r"(.*?)=(.*)", line)
if match:
key = match.group(1)
value = match.group(2)
if key not in os.environ:
if key not in ["PROMPT", "Path"]:
worker.utils.info(f"Adding key {key}={value}")
os.environ[key] = value
os.environ["PATH"] = (
os.environ["VCToolsInstallDir"]
+ vs_tool_install_dir_suffix
+ os.pathsep
+ os.environ["PATH"]
)
def create_argument_parser(steps: worker.utils.BuilderSteps) -> argparse.ArgumentParser:
parser = worker.utils.create_argument_parser(steps=steps)
parser.add_argument("--needs-full-clean", action="store_true", required=False)
parser.add_argument("--needs-gpu-binaries", action="store_true", required=False)
parser.add_argument("--needs-gpu-tests", action="store_true", required=False)
parser.add_argument("--python-module", action="store_true", required=False)
parser.add_argument(
"--build-configuration",
default="release",
type=str,
choices=["release", "asserts", "sanitizer", "debug"],
required=False,
)
parser.add_argument(
"--architecture",
default="",
type=str,
choices=["arm64", "x86_64", "amd64"],
required=False,
)
return parser
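# For context: a worker entry point would presumably wire this parser and the
# CodeBuilder together roughly as below. Hypothetical sketch; the step table
# contents, the module path worker.blender.compile, and the run() dispatch
# helper are assumptions, not part of this commit.
#
# import worker.blender.compile
#
# if __name__ == "__main__":
#     steps: worker.utils.BuilderSteps = OrderedDict()
#     steps["compile-code"] = worker.blender.compile.compile_code
#     parser = create_argument_parser(steps=steps)
#     args = parser.parse_args()
#     builder = CodeBuilder(args)
#     builder.run(args, steps)  # assumed dispatch on worker.utils.Builder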


@@ -0,0 +1,125 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import json
import os
import pathlib
import urllib.request
import sys
import conf.worker
import worker.blender
import worker.utils
def create_upload(
builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
) -> None:
# Create package directory.
branch = builder.branch_id.replace("blender-", "").replace("-release", "")
name = f"{branch}-{builder.platform}-{builder.architecture}"
package_dir = builder.package_dir / name
worker.utils.remove_dir(package_dir)
os.makedirs(package_dir, exist_ok=True)
# Fetch existing summary
worker_config = conf.worker.get_config(builder.service_env_id)
base_urls = {
"LOCAL": str(worker_config.buildbot_download_folder),
"UATEST": "https://builder.uatest.blender.org/download",
"PROD": "https://builder.blender.org/download",
}
base_url = base_urls[builder.service_env_id]
summary_json_url = f"{base_url}/daily/benchmarks/{name}/summary.json"
summary_json_path = package_dir / "summary.json"
try:
if builder.service_env_id == "LOCAL":
worker.utils.copy_file(pathlib.Path(summary_json_url), summary_json_path)
else:
urllib.request.urlretrieve(summary_json_url, summary_json_path)
except Exception as e:
error_msg = str(e)
worker.utils.warning(f"Could not retrieve benchmark summary.json: {error_msg}")
# Create json files in package directory.
results_json_path = benchmark_path / "results.json"
revision_json_path = package_dir / f"{revision}.json"
worker.utils.copy_file(results_json_path, revision_json_path)
summary_json = []
if summary_json_path.exists():
summary_json = json.loads(summary_json_path.read_text())
summary_json += json.loads(results_json_path.read_text())
summary_json_path.write_text(json.dumps(summary_json, indent=2))
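# e.g. summary.json simply accumulates the result entries of every benchmarked
# revision (entry contents are whatever benchmark.py emits in results.json):
#   [ {...entry for 0123456789ab...}, {...entry for 23456789abcd...} ]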
# Create html file in package directory.
report_html_path = package_dir / "report.html"
cmd = [
sys.executable,
builder.code_path / "tests" / "performance" / "benchmark.py",
"graph",
summary_json_path,
"-o",
report_html_path,
]
worker.utils.call(cmd)
def benchmark(builder: worker.blender.CodeBuilder) -> None:
# Parameters
os.chdir(builder.code_path)
revision = worker.utils.check_output(["git", "rev-parse", "HEAD"])
revision = revision[:12]
blender_command = builder.blender_command_path()
gpu_device = "METAL" if builder.platform == "darwin" else "OPTIX"
background = False if builder.platform == "darwin" else True
worker.utils.info(f"Benchmark revision {revision}, GPU device {gpu_device}")
# Create clean benchmark folder
benchmark_path = builder.track_path / "benchmark" / "default"
worker.utils.remove_dir(benchmark_path)
os.makedirs(benchmark_path, exist_ok=True)
# Initialize configuration
config_py_path = benchmark_path / "config.py"
config_py_text = f"""
devices = ["CPU", "{gpu_device}_0"]
background = {background}
builds = {{"{revision}": "{blender_command}"}}
benchmark_type = "time_series"
"""
config_py_path.write_text(config_py_text)
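# For example, on macOS arm64 the generated config.py reads roughly:
#   devices = ["CPU", "METAL_0"]
#   background = False
#   builds = {"0123456789ab": "<install_dir>/Blender.app/Contents/macOS/Blender"}
#   benchmark_type = "time_series"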
# Checkout benchmark files
tests_benchmarks_path = builder.code_path / "tests" / "benchmarks"
if not tests_benchmarks_path.exists():
benchmarks_url = "https://projects.blender.org/blender/blender-benchmarks.git"
worker.utils.call(["git", "clone", benchmarks_url, tests_benchmarks_path])
# Run benchmark
cmd = [
sys.executable,
builder.code_path / "tests" / "performance" / "benchmark.py",
"list",
]
worker.utils.call(cmd)
cmd = [
sys.executable,
builder.code_path / "tests" / "performance" / "benchmark.py",
"run",
"default",
]
exit_code = worker.utils.call(cmd, exit_on_error=False)
# Write results to be uploaded
create_upload(builder, benchmark_path, revision)
sys.exit(exit_code)


@@ -0,0 +1,25 @@
tell application "Finder"
tell disk "Blender"
log "applescript: opening [Blender]. This will seem to hang with a pop up dialog on applescript permissions for the first run. You have 10 minutes, get on machine now and push that button !!!"
with timeout of 600 seconds
open
log "applescript: yay it opened !"
log "applescript: setting current view"
set current view of container window to icon view
set toolbar visible of container window to false
set statusbar visible of container window to false
set the bounds of container window to {100, 100, 640, 472}
set theViewOptions to icon view options of container window
set arrangement of theViewOptions to not arranged
set icon size of theViewOptions to 128
set background picture of theViewOptions to file ".background:background.tif"
set position of item " " of container window to {400, 190}
set position of item "blender.app" of container window to {135, 190}
log "applescript: updating applications"
update without registering applications
delay 5
log "applescript: closing"
close
end timeout
end tell
end tell


@@ -0,0 +1,473 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import os
import re
import time
import subprocess
import platform
import pathlib
import tempfile
import typing
import worker.utils
# Extra size which is added on top of the actual file size when estimating the
# size of the destination DMG.
_extra_dmg_size_in_bytes = 800 * 1024 * 1024
################################################################################
# Common utilities
def get_directory_size(root_directory: pathlib.Path) -> int:
"""
Get size of directory on disk
"""
total_size = 0
for file in root_directory.glob("**/*"):
total_size += file.lstat().st_size
return total_size
################################################################################
# DMG bundling specific logic
def collect_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]:
"""
Collect all app bundles which are to be put into DMG
If the source directory points to FOO.app it will be the only app bundle
packed.
Otherwise all .app bundles from given directory are placed to a single
DMG.
"""
if source_dir.name.endswith(".app"):
return [source_dir]
app_bundles = []
for filename in source_dir.glob("*"):
if not filename.is_dir():
continue
if not filename.name.endswith(".app"):
continue
app_bundles.append(filename)
return app_bundles
def collect_and_log_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]:
app_bundles = collect_app_bundles(source_dir)
if not app_bundles:
worker.utils.info("No app bundles found for packing")
return []
worker.utils.info(f"Found {len(app_bundles)} to pack:")
for app_bundle in app_bundles:
worker.utils.info(f"- {app_bundle}")
return app_bundles
def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int:
"""
Estimate size of DMG to hold requested app bundles
The size is based on actual size of all files in all bundles plus some
space to compensate for different size-on-disk plus some space to hold
codesign signatures.
It is better to err on the high side since the empty space is compressed, but
lack of space might cause silent failures later on.
"""
app_bundles_size = 0
for app_bundle in app_bundles:
app_bundles_size += get_directory_size(app_bundle)
return app_bundles_size + _extra_dmg_size_in_bytes
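# Worked example: app bundles totalling 600 MiB on disk estimate to
# 600 MiB + 800 MiB of padding = 1400 MiB for the writable DMG.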
def copy_app_bundles(app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path) -> None:
"""
Copy all bundles to a given directory
This directory is what the DMG will be created from.
"""
for app_bundle in app_bundles:
destination_dir_path = dir_path / app_bundle.name
worker.utils.info(f"Copying app bundle [{app_bundle}] to [{dir_path}]")
worker.utils.copy_dir(app_bundle, destination_dir_path)
# Only chmod if we can't get cmake install to do it - james
# for r, d, f in os.walk(destination_dir_path):
# worker.utils.info(f'chmoding [{r}] -> 0o755')
# os.chmod(r, 0o755)
def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path:
"""
Get the main application bundle for the installation
"""
return app_bundles[0]
def create_dmg_image(
app_bundles: typing.List[pathlib.Path], dmg_file_path: pathlib.Path, volume_name: str
) -> None:
"""
Create DMG disk image and put app bundles in it
No DMG configuration or codesigning is happening here.
"""
if dmg_file_path.exists():
worker.utils.info(f"Removing existing writable DMG {dmg_file_path}...")
worker.utils.remove_file(dmg_file_path)
temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-")
worker.utils.info(f"Preparing directory with app bundles for the DMG [{temp_content_path}]")
with temp_content_path as content_dir_str:
# Copy all bundles to a clean directory.
content_dir_path = pathlib.Path(content_dir_str)
# worker.utils.info(f'content_dir_path={content_dir_path}')
copy_app_bundles(app_bundles, content_dir_path)
# Estimate size of the DMG.
dmg_size = estimate_dmg_size(app_bundles)
worker.utils.info(f"Estimated DMG size: [{dmg_size:,}] bytes.")
# Create the DMG.
worker.utils.info(f"Creating writable DMG [{dmg_file_path}]")
command = (
"hdiutil",
"create",
"-size",
str(dmg_size),
"-fs",
"HFS+",
"-srcfolder",
content_dir_path,
"-volname",
volume_name,
"-format",
"UDRW",
"-mode",
"755",
dmg_file_path,
)
worker.utils.call(command)
def get_writable_dmg_file_path(dmg_file_path: pathlib.Path) -> pathlib.Path:
"""
Get file path for writable DMG image
"""
parent = dmg_file_path.parent
return parent / (dmg_file_path.stem + "-temp.dmg")
def mount_readwrite_dmg(dmg_file_path: pathlib.Path) -> None:
"""
Mount writable DMG
Mounting point would be /Volumes/<volume name>
"""
worker.utils.info(f"Mounting read-write DMG ${dmg_file_path}")
cmd: worker.utils.CmdSequence = [
"hdiutil",
"attach",
"-readwrite",
"-noverify",
"-noautoopen",
dmg_file_path,
]
worker.utils.call(cmd)
def get_mount_directory_for_volume_name(volume_name: str) -> pathlib.Path:
"""
Get directory under which the volume will be mounted
"""
return pathlib.Path("/Volumes") / volume_name
def eject_volume(volume_name: str) -> None:
"""
Eject given volume, if mounted
"""
mount_directory = get_mount_directory_for_volume_name(volume_name)
if not mount_directory.exists():
return
mount_directory_str = str(mount_directory)
worker.utils.info(f"Ejecting volume [{volume_name}]")
# First try through Finder, as sometimes diskutil fails for unknown reasons.
command = [
"osascript",
"-e",
f"""tell application "Finder" to eject (every disk whose name is "{volume_name}")""",
]
worker.utils.call(command)
if not mount_directory.exists():
return
# Figure out which device to eject.
mount_output = subprocess.check_output(["mount"]).decode()
device = ""
for line in mount_output.splitlines():
if f"on {mount_directory_str} (" not in line:
continue
tokens = line.split(" ", 3)
if len(tokens) < 3:
continue
if tokens[1] != "on":
continue
if device:
raise Exception(f"Multiple devices found for mounting point [{mount_directory}]")
device = tokens[0]
if not device:
raise Exception(f"No device found for mounting point [{mount_directory}]")
worker.utils.info(f"[{mount_directory}] is mounted as device [{device}], ejecting...")
command = ["diskutil", "eject", device]
worker.utils.call(command)
def copy_background_if_needed(
background_image_file_path: pathlib.Path, mount_directory: pathlib.Path
) -> None:
"""
Copy background to the DMG
If the background image is not specified it will not be copied.
"""
if not background_image_file_path:
worker.utils.info("No background image provided.")
return
destination_dir = mount_directory / ".background"
destination_dir.mkdir(exist_ok=True)
destination_file_path = destination_dir / background_image_file_path.name
worker.utils.info(
f"Copying background image [{background_image_file_path}] to [{destination_file_path}]"
)
worker.utils.copy_file(background_image_file_path, destination_file_path)
def create_applications_link(mount_directory: pathlib.Path) -> None:
"""
Create link to /Applications in the given location
"""
worker.utils.info(f"Creating link to /Applications -> {mount_directory}")
target_path = mount_directory / " "
cmd: worker.utils.CmdSequence = ["ln", "-s", "/Applications", target_path]
worker.utils.call(cmd)
def run_applescript_file_path(
applescript_file_path: pathlib.Path,
volume_name: str,
app_bundles: typing.List[pathlib.Path],
background_image_file_path: pathlib.Path,
) -> None:
"""
Run given applescript to adjust look and feel of the DMG
"""
main_app_bundle = get_main_app_bundle(app_bundles)
architecture = platform.machine().lower()
# needs_run_applescript = (architecture != "x86_64")
needs_run_applescript = True
if not needs_run_applescript:
worker.utils.info(f"Having issues with apple script on [{architecture}], skipping !")
return
temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript")
with temp_script_file_path as temp_applescript_file:
worker.utils.info(
f"Adjusting applescript [{temp_script_file_path.name}] for volume name [{volume_name}]"
)
# Adjust script to the specific volume name.
with open(applescript_file_path, mode="r") as input_file:
worker.utils.info("Start script update")
for line in input_file.readlines():
stripped_line = line.strip()
if stripped_line.startswith("tell disk"):
line = re.sub('tell disk ".*"', f'tell disk "{volume_name}"', line)
elif stripped_line.startswith("set background picture"):
if not background_image_file_path:
continue
else:
background_image_short = f".background:{background_image_file_path.name}"
line = re.sub('to file ".*"', f'to file "{background_image_short}"', line)
line = line.replace("blender.app", main_app_bundle.name)
stripped_line = line.rstrip("\r\n")
worker.utils.info(f"line={stripped_line}")
temp_applescript_file.write(line)
temp_applescript_file.flush()
worker.utils.info("End script update")
# This does not help issues when running applescript
worker.utils.info("Updating permissions")
os.chmod(temp_script_file_path.name, 0o755)
# Setting flags to this applescript will fail execution, not permitted
# command = ['chflags', "uchg", temp_script_file_path.name]
# worker.utils.call(command)
command = ["osascript", "-s", "o", temp_script_file_path.name]
worker.utils.call(command)
# NOTE: This is copied from bundle.sh. The exact reason for the sleep
# remains a mystery.
worker.utils.info("Waiting for applescript...")
time.sleep(5)
def compress_dmg(writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path) -> None:
"""
Compress temporary read-write DMG
"""
cmd: worker.utils.CmdSequence = [
"hdiutil",
"convert",
writable_dmg_file_path,
"-format",
"UDZO",
"-o",
final_dmg_file_path,
]
if final_dmg_file_path.exists():
worker.utils.info(f"Removing old compressed DMG [{final_dmg_file_path}]")
worker.utils.remove_file(final_dmg_file_path)
worker.utils.info("Compressing disk image...")
worker.utils.call(cmd)
def create_final_dmg(
app_bundles: typing.List[pathlib.Path],
dmg_file_path: pathlib.Path,
background_image_file_path: pathlib.Path,
volume_name: str,
applescript_file_path: pathlib.Path,
) -> None:
"""
Create DMG with all app bundles
Will take care of configuring the background
"""
worker.utils.info("Running all routines to create final DMG")
writable_dmg_file_path = get_writable_dmg_file_path(dmg_file_path)
worker.utils.info(f"Mouting volume [{volume_name}]")
mount_directory = get_mount_directory_for_volume_name(volume_name)
worker.utils.info(f"Mount at [{mount_directory}]")
# Make sure volume is not mounted.
# If it is mounted it will prevent removing old DMG files and could make
# it so app bundles are copied to the wrong place.
eject_volume(volume_name)
worker.utils.info(f"Creating image [{writable_dmg_file_path}] to [{volume_name}]")
create_dmg_image(app_bundles, writable_dmg_file_path, volume_name)
worker.utils.info(f"Mount r/w mode [{writable_dmg_file_path}]")
mount_readwrite_dmg(writable_dmg_file_path)
copy_background_if_needed(background_image_file_path, mount_directory)
create_applications_link(mount_directory)
run_applescript_file_path(
applescript_file_path, volume_name, app_bundles, background_image_file_path
)
eject_volume(volume_name)
compress_dmg(writable_dmg_file_path, dmg_file_path)
worker.utils.remove_file(writable_dmg_file_path)
def ensure_dmg_extension(filepath: pathlib.Path) -> pathlib.Path:
"""
Make sure the given file has a .dmg extension
"""
if filepath.suffix != ".dmg":
return filepath.with_suffix(f"{filepath.suffix}.dmg")
return filepath
def get_dmg_file_path(
requested_file_path: pathlib.Path, app_bundles: typing.List[pathlib.Path]
) -> pathlib.Path:
"""
Get full file path for the final DMG image
Will use the provided one when possible, otherwise will deduce it from
app bundles.
If the name is deduced, the DMG is stored in the current directory.
"""
if requested_file_path:
return ensure_dmg_extension(requested_file_path.absolute())
# TODO(sergey): This is not necessarily the main one.
main_bundle = app_bundles[0]
# Strip .app from the name
return pathlib.Path(main_bundle.name[:-4] + ".dmg").absolute()
def get_volume_name_from_dmg_file_path(dmg_file_path: pathlib.Path) -> str:
"""
Deduce volume name from the DMG path
Will use the first part of the DMG file name prior to the dash.
"""
tokens = dmg_file_path.stem.split("-")
words = tokens[0].split()
return " ".join(word.capitalize() for word in words)
def bundle(
source_dir: pathlib.Path,
dmg_file_path: pathlib.Path,
applescript_file_path: pathlib.Path,
background_image_file_path: pathlib.Path,
) -> None:
app_bundles = collect_and_log_app_bundles(source_dir)
for app_bundle in app_bundles:
worker.utils.info(f"App bundle path [{app_bundle}]")
dmg_file_path = get_dmg_file_path(dmg_file_path, app_bundles)
volume_name = get_volume_name_from_dmg_file_path(dmg_file_path)
worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]")
create_final_dmg(
app_bundles, dmg_file_path, background_image_file_path, volume_name, applescript_file_path
)


@@ -0,0 +1,534 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import multiprocessing
import os
import platform
import pathlib
from typing import Dict
from pathlib import Path
import worker.blender
import worker.utils
def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int:
"""Fetch cpu ideal for the building process based on machine info"""
worker.utils.info(f"estimate_core_memory_in_mb={estimate_core_memory_in_mb}")
total_cpu_count = multiprocessing.cpu_count()
worker.utils.info(f"total_cpu_count={total_cpu_count}")
ideal_cpu_count = total_cpu_count
spare_cpu_count = 2
if platform.system().lower() != "darwin":
worker.utils.info(f"In current path {os.getcwd()}")
import psutil
virtual_memory = psutil.virtual_memory()
worker.utils.info(f"virtual_memory={virtual_memory}")
total_memory_in_bytes = virtual_memory.total
worker.utils.info(f"total_memory_in_bytes={total_memory_in_bytes}")
available_memory_in_bytes = virtual_memory.available
worker.utils.info(f"available_memory_in_bytes={available_memory_in_bytes}")
usable_memory_in_bytes = available_memory_in_bytes
worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}")
estimate_memory_per_core_in_bytes = estimate_core_memory_in_mb * 1024 * 1024
worker.utils.info(f"estimate_memory_per_core_in_bytes={estimate_memory_per_core_in_bytes}")
capable_cpu_count = int(total_memory_in_bytes / estimate_memory_per_core_in_bytes)
worker.utils.info(f"capable_cpu_count={capable_cpu_count}")
min_cpu_count = min(total_cpu_count, capable_cpu_count)
worker.utils.info(f"min_cpu_count={min_cpu_count}")
ideal_cpu_count = min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}")
return ideal_cpu_count
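# Worked examples: 32 GiB RAM with 6000 MiB per core caps at
# int(32768 / 6000) = 5 jobs even on a 32-core machine; a 16-core machine
# with ample RAM gets 16 - 2 = 14 jobs.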
def get_cmake_build_type(builder: worker.blender.CodeBuilder) -> str:
if builder.build_configuration == "debug":
return "Debug"
elif builder.build_configuration == "sanitizer":
# No reliable ASAN on Windows currently.
if builder.platform != "windows":
return "RelWithDebInfo"
else:
return "Release"
else:
return "Release"
def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSequence:
needs_gtest_compile = not builder.python_module
with_gtests_state = "ON" if needs_gtest_compile else "OFF"
with_gpu_binaries_state = "ON" if builder.needs_gpu_binaries else "OFF"
with_gpu_tests = False
buildbotConfig = builder.pipeline_config()
# This is meant for stable build compilation
config_file_path = "build_files/cmake/config/blender_release.cmake"
platform_config_file_path = None
if builder.platform == "darwin":
platform_config_file_path = "build_files/buildbot/config/blender_macos.cmake"
elif builder.platform == "linux":
platform_config_file_path = "build_files/buildbot/config/blender_linux.cmake"
elif builder.platform == "windows":
platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake"
if platform_config_file_path:
worker.utils.info(f'Trying platform-specific buildbot configuration "{platform_config_file_path}"')
if (Path(builder.blender_dir) / platform_config_file_path).exists():
worker.utils.info(f'Using platform-specific buildbot configuration "{platform_config_file_path}"')
config_file_path = platform_config_file_path
else:
worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"')
# Must be first so that we can override some of the options found in the file
options = ["-C", os.path.join(builder.blender_dir, config_file_path)]
# Optional build as Python module.
if builder.python_module:
bpy_config_file_path = "build_files/cmake/config/bpy_module.cmake"
options += ["-C", os.path.join(builder.blender_dir, bpy_config_file_path)]
options += ["-DWITH_INSTALL_PORTABLE=ON"]
can_enable_oneapi_binaries = True
if builder.service_env_id != "PROD":
# UATEST machines are too slow currently.
worker.utils.info(f'Disabling oneAPI binaries on "{builder.service_env_id}"')
can_enable_oneapi_binaries = False
if builder.patch_id:
# Not enough throughput on the systems to cover AoT oneAPI binaries for patches.
worker.utils.info("Disabling oneAPI binaries for patch build")
can_enable_oneapi_binaries = False
if builder.track_id == "vexp":
# Only enable AoT oneAPI binaries for main and release branches.
worker.utils.info("Disabling oneAPI binaries for branch build")
can_enable_oneapi_binaries = False
# Add platform specific generator and configs
if builder.platform == "darwin":
if builder.needs_ninja:
options += ["-G", "Ninja"]
else:
options += ["-G", "Unix Makefiles"]
options += [f"-DCMAKE_OSX_ARCHITECTURES:STRING={builder.architecture}"]
elif builder.platform == "linux":
if builder.needs_ninja:
options += ["-G", "Ninja"]
else:
options += ["-G", "Unix Makefiles"]
elif builder.platform == "windows":
if builder.needs_ninja:
# set CC=%LLVM_DIR%\bin\clang-cl
# set CXX=%LLVM_DIR%\bin\clang-cl
# set CFLAGS=-m64 -fmsc-version=1922
# set CXXFLAGS=-m64 -fmsc-version=1922
vc_tools_install_dir = os.environ.get("VCToolsInstallDir")
if not vc_tools_install_dir:
raise Exception("Missing environment variable VCToolsInstallDir")
vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir)
if builder.architecture == "arm64":
compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
linker_file_path="C:/Program Files/LLVM/bin/lld-link.exe"
else:
vs_tool_install_dir_suffix = "bin/Hostx64/x64"
compiler_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe")
linker_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe")
options += ["-G", "Ninja"]
# -DWITH_WINDOWS_SCCACHE=On
options += [
f"-DCMAKE_C_COMPILER:FILEPATH={compiler_file_path}",
f"-DCMAKE_CXX_COMPILER:FILEPATH={compiler_file_path}",
]
# options += ["-DCMAKE_EXE_LINKER_FLAGS:STRING=/machine:x64"]
options += [f"-DCMAKE_LINKER:FILEPATH={linker_file_path}"]
# Skip the test, it does not work
options += ["-DCMAKE_C_COMPILER_WORKS=1"]
options += ["-DCMAKE_CXX_COMPILER_WORKS=1"]
else:
if builder.architecture == "arm64":
options += ["-G", "Visual Studio 17 2022", "-A", "arm64"]
else:
options += ["-G", "Visual Studio 16 2019", "-A", "x64"]
# Add configured overrides
platform_architecture = f"{builder.platform}-{builder.architecture}"
cmake_overrides: Dict[str, str] = {}
cmake_overrides.update(buildbotConfig["cmake"]["default"]["overrides"])
cmake_overrides.update(buildbotConfig["cmake"][platform_architecture]["overrides"])
# Disallow certain options
restricted_key_patterns = [
"POSTINSTALL_SCRIPT",
"OPTIX_",
"CMAKE_OSX_ARCHITECTURES",
"CMAKE_BUILD_TYPE",
"CMAKE_INSTALL_PREFIX",
"WITH_GTESTS",
"CUDA",
"WITH_CYCLES",
"CYCLES_CUDA",
]
for cmake_key in cmake_overrides.keys():
for restricted_key_pattern in restricted_key_patterns:
if restricted_key_pattern in cmake_key:
raise Exception(f"CMake key [{cmake_key}] cannot be overriden, aborting")
for cmake_key, cmake_value in cmake_overrides.items():
options += [f"-D{cmake_key}={cmake_value}"]
cmake_build_type = get_cmake_build_type(builder)
options += [f"-DCMAKE_BUILD_TYPE:STRING={cmake_build_type}"]
if builder.build_configuration == "sanitizer":
# No reliable ASAN on Windows currently.
if builder.platform != "windows":
options += ["-DWITH_COMPILER_ASAN=ON"]
options += ["-DWITH_ASSERT_RELEASE=ON"]
# Avoid buildbot timeouts, see blender/blender#116635.
options += ["-DWITH_UNITY_BUILD=OFF"]
elif builder.build_configuration == "asserts":
options += ["-DWITH_ASSERT_RELEASE=ON"]
options += [f"-DCMAKE_INSTALL_PREFIX={builder.install_dir}"]
options += ["-DWITH_INSTALL_COPYRIGHT=ON"]
options += [f"-DWITH_GTESTS={with_gtests_state}"]
if builder.platform == "windows":
if builder.architecture != "arm64":
# CUDA + HIP + oneAPI on Windows
options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"]
options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"]
if can_enable_oneapi_binaries:
options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"]
options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"]
else:
options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"]
if "hip" in buildbotConfig:
hip_version = buildbotConfig["hip"]["version"]
else:
hip_version = "5.2.21440"
if "ocloc" in buildbotConfig:
ocloc_version = buildbotConfig["ocloc"]["version"]
else:
ocloc_version = "dev_01"
options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"]
options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"]
options += [f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"]
elif builder.platform == "linux":
# CUDA on Linux
options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"]
options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"]
if can_enable_oneapi_binaries:
options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"]
options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"]
else:
options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"]
# Directory changed to just /opt/rocm in 6.x
rocm_path = pathlib.Path("/opt/rocm/hip")
if not rocm_path.exists():
rocm_path = pathlib.Path("/opt/rocm")
options += [f"-DHIP_ROOT_DIR:PATH={rocm_path}"]
# GPU render tests support Linux + NVIDIA currently
if builder.needs_gpu_tests:
with_gpu_tests = True
if builder.needs_gpu_binaries:
options += ["-DCYCLES_TEST_DEVICES=CPU;OPTIX"]
elif builder.platform == "darwin":
# Metal on macOS
if builder.architecture == "arm64":
if builder.needs_gpu_tests:
with_gpu_tests = True
options += ["-DCYCLES_TEST_DEVICES=CPU;METAL"]
if with_gpu_tests:
# Needs X11 or Wayland, and fails with xvfb to emulate X11.
# options += [f"-DWITH_GPU_DRAW_TESTS=ON"]
options += ["-DWITH_GPU_RENDER_TESTS=ON"]
options += ["-DWITH_GPU_RENDER_TESTS_SILENT=OFF"]
options += ["-DWITH_COMPOSITOR_REALTIME_TESTS=ON"]
if "optix" in buildbotConfig:
optix_version = buildbotConfig["optix"]["version"]
if builder.platform == "windows" and builder.architecture != "arm64":
options += [
f"-DOPTIX_ROOT_DIR:PATH=C:/ProgramData/NVIDIA Corporation/OptiX SDK {optix_version}"
]
elif builder.platform == "linux":
optix_base_dir = pathlib.Path.home() / ".devops" / "apps"
options += [
f"-DOPTIX_ROOT_DIR:PATH={optix_base_dir}/NVIDIA-OptiX-SDK-{optix_version}-linux64-x86_64"
]
# Blender 4.3 has switched to pre-compiled HIP-RT libraries.
if "hiprt" in buildbotConfig:
hiprt_version = buildbotConfig["hiprt"]["version"]
if builder.platform == "windows" and builder.architecture != "arm64":
options += [
f"-DHIPRT_ROOT_DIR:PATH=C:/ProgramData/AMD/HIP/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
]
elif builder.platform == "linux":
hiprt_base_dir = pathlib.Path.home() / ".devops" / "apps"
options += [
f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
]
# Enable option to verify enabled libraries and features did not get disabled.
options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"]
needs_cuda_compile = builder.needs_gpu_binaries
if builder.needs_gpu_binaries:
try:
cuda10_version = buildbotConfig["cuda10"]["version"]
except KeyError:
cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"]
cuda10_folder_version = ".".join(cuda10_version.split(".")[:2])
try:
cuda11_version = buildbotConfig["cuda11"]["version"]
except KeyError:
cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"]
cuda11_folder_version = ".".join(cuda11_version.split(".")[:2])
try:
cuda12_version = buildbotConfig["cuda12"]["version"]
cuda12_folder_version = ".".join(cuda12_version.split(".")[:2])
have_cuda12 = True
except KeyError:
have_cuda12 = False
if builder.platform == "windows" and builder.architecture != "arm64":
# CUDA 10
cuda10_path = pathlib.Path(
f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda10_folder_version}"
)
if not cuda10_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting"
)
cuda10_file_path = cuda10_path / "bin" / "nvcc.exe"
options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"]
options += [f"-DCUDA10_NVCC_EXECUTABLE:FILEPATH={cuda10_file_path}"]
# CUDA 11
cuda11_path = pathlib.Path(
f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda11_folder_version}"
)
if not cuda11_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting"
)
cuda11_file_path = cuda11_path / "bin" / "nvcc.exe"
# CUDA 12
if have_cuda12:
cuda12_path = pathlib.Path(
f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda12_folder_version}"
)
if not cuda12_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting"
)
cuda12_file_path = cuda12_path / "bin" / "nvcc.exe"
options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
options += [f"-DCUDA11_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"]
options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"]
else:
options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
elif builder.platform == "linux":
# CUDA 10
cuda10_path = pathlib.Path(f"/usr/local/cuda-{cuda10_folder_version}")
if not cuda10_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting"
)
cuda10_file_path = cuda10_path / "bin" / "nvcc"
# CUDA 11
cuda11_path = pathlib.Path(f"/usr/local/cuda-{cuda11_folder_version}")
if not cuda11_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting"
)
cuda11_file_path = cuda11_path / "bin" / "nvcc"
# CUDA 12
if have_cuda12:
cuda12_path = pathlib.Path(f"/usr/local/cuda-{cuda12_folder_version}")
if not cuda12_path.exists():
raise Exception(
f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting"
)
cuda12_file_path = cuda12_path / "bin" / "nvcc"
# CUDA 10, must provide compatible host compiler.
options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"]
if pathlib.Path(
"/etc/rocky-release"
).exists(): # We check for Rocky. Version 8 has GCC 8 in /usr/bin
options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
options += ["-DCUDA_HOST_COMPILER=/usr/bin/gcc"]
else:
# Use new CMake option.
options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
options += ["-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"]
# CUDA 11 or 12.
if have_cuda12:
options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
options += [f"-DCUDA11_NVCC_EXECUTABLE:STRING={cuda11_file_path}"]
options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"]
options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"]
else:
options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
else:
worker.utils.info("Skipping gpu compilation as requested")
return options
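# A typical Linux release configure assembled from these options looks roughly
# like this (paths illustrative):
#   cmake -S .../blender.git -B .../build_release \
#     -C .../blender.git/build_files/buildbot/config/blender_linux.cmake \
#     -DCMAKE_BUILD_TYPE:STRING=Release -DWITH_GTESTS=ON \
#     -DWITH_CYCLES_CUDA_BINARIES=ON -DCMAKE_INSTALL_PREFIX=.../install_release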
def clean_directories(builder: worker.blender.CodeBuilder) -> None:
worker.utils.info(f"Cleaning directory [{builder.install_dir})] from the previous run")
worker.utils.remove_dir(builder.install_dir)
os.makedirs(builder.build_dir, exist_ok=True)
worker.utils.info("Remove buildinfo files to re-generate them")
for build_info_file_name in (
"buildinfo.h",
"buildinfo.h.txt",
):
full_path = builder.build_dir / "source" / "creator" / build_info_file_name
if full_path.exists():
worker.utils.info(f"Removing file [{full_path}]")
worker.utils.remove_file(full_path)
def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
cmake_cache_file_path = builder.build_dir / "CMakeCache.txt"
if cmake_cache_file_path.exists():
worker.utils.info("Removing CMake cache")
worker.utils.remove_file(cmake_cache_file_path)
worker.utils.info("CMake configure options")
cmake_options = get_cmake_options(builder)
cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(cmake_options)
builder.call(cmd)
# This hack does not work as expected: since the CMake cache is always updated, we end up recompiling on each compile step (code, gpu and install).
needs_cmake_cache_hack = False
if needs_cmake_cache_hack and pathlib.Path("/usr/lib64/libpthread.a").exists():
# HACK: The detection for lib pthread does not work on CentOS 7
worker.utils.warning(f"Hacking file [{cmake_cache_file_path}]")
tmp_cmake_cache_file_path = builder.build_dir / "CMakeCache.txt.tmp"
fin = open(cmake_cache_file_path)
fout = open(tmp_cmake_cache_file_path, "wt")
for line in fin:
# worker.utils.info(line)
if "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" in line:
worker.utils.warning(
"Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] to [/usr/lib64/libpthread.a]"
)
line = line.replace(
"OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND",
"OpenMP_pthread_LIBRARY:FILEPATH=/usr/lib64/libpthread.a",
)
fout.write(line)
fin.close()
fout.close()
worker.utils.warning(f"Updating [{cmake_cache_file_path}]")
os.replace(tmp_cmake_cache_file_path, cmake_cache_file_path)
def cmake_build(builder: worker.blender.CodeBuilder, do_install: bool) -> None:
if builder.track_id in ["vdev", "v430"]:
if builder.platform == "windows":
estimate_gpu_memory_in_mb = 6000
else:
estimate_gpu_memory_in_mb = 4000
else:
estimate_gpu_memory_in_mb = 6000
estimate_core_memory_in_mb = estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb)
# Enable verbose building to make ninja output more often.
# It should help with slow build commands like oneAPI, and will also help
# troubleshoot situations where the compile-gpu step times out.
needs_verbose = builder.needs_gpu_binaries
build_type = get_cmake_build_type(builder)
cmd = ["cmake", "--build", builder.build_dir, "--config", build_type]
cmd += ["--parallel", f"{ideal_cpu_count}"]
if do_install:
cmd += ["--target", "install"]
if needs_verbose:
cmd += ["--verbose"]
builder.call(cmd)
def compile_code(builder: worker.blender.CodeBuilder) -> None:
builder.needs_gpu_binaries = False
builder.setup_build_environment()
clean_directories(builder)
cmake_configure(builder)
cmake_build(builder, False)
def compile_gpu(builder: worker.blender.CodeBuilder) -> None:
if builder.platform == "darwin":
worker.utils.info("Compile GPU not required on macOS")
return
builder.needs_gpu_binaries = True
builder.setup_build_environment()
cmake_configure(builder)
cmake_build(builder, False)
def compile_install(builder: worker.blender.CodeBuilder) -> None:
builder.setup_build_environment()
cmake_configure(builder)
cmake_build(builder, True)


@@ -0,0 +1,34 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# This is a script which is used as POST-INSTALL one for regular CMake's
# INSTALL target.
#
# It is used by buildbot workers to sign every binary which is going into
# the final bundle.
#
execute_process(
COMMAND python "${CMAKE_CURRENT_LIST_DIR}/cpack_post.py" "${CMAKE_INSTALL_PREFIX}"
WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}
RESULT_VARIABLE exit_code
)
if(NOT exit_code EQUAL "0")
message(FATAL_ERROR "Non-zero exit code of codesign tool")
endif()
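# This script is wired in by the packaging worker via
#   -DPOSTINSTALL_SCRIPT:PATH=<...>/cpack_post.cmake
# (see pack_win in the packaging code below), so CPack runs it after the
# INSTALL target completes.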


@@ -0,0 +1,30 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import pathlib
import sys
sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent.parent))
import worker.blender.sign
import worker.utils
path = pathlib.Path(sys.argv[1]).resolve()
worker.blender.sign.sign_windows("PROD", path)
if str(path).find("Unspecified") != -1:
print("Probably running with cpack command, adding Blender path")
blender_path = path.parent / "Blender"
worker.blender.sign.sign_windows("PROD", blender_path)
print("Codesign for cpack is finished")
# Only do this for zip
if str(path).find("ZIP") != -1:
new_path = path.parent / path.name.replace("-windows64", "")
package_file_path = new_path.parent / (new_path.name + ".zip")
worker.utils.call(["7z", "a", "-tzip", package_file_path, path, "-r"])
worker.utils.call(["7z", "rn", package_file_path, path.name, new_path.name])


@@ -0,0 +1,45 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import os
import sys
import worker.blender
import worker.utils
def make_format(builder: worker.blender.CodeBuilder) -> bool:
os.chdir(builder.blender_dir)
# Always run formatting with scripts from main, for security on unverified patches.
# TODO: how secure is this? How to test formatting issues in the scripts themselves?
# main_files = [makefile, "tools/utils_maintenance", "build_files/windows"]
# for main_file in main_files:
# worker.utils.call(['git', 'checkout', 'origin/main', '--', main_file])
# Run format
if builder.platform == "windows":
builder.call(["make.bat", "format"])
else:
builder.call(["make", "-f", "GNUmakefile", "format"])
# Check for changes
diff = worker.utils.check_output(["git", "diff"])
if len(diff) > 0:
print(diff)
# Reset
worker.utils.call(["git", "checkout", "HEAD", "--", "."])
if len(diff) > 0:
worker.utils.error('Incorrect formatting detected, run "make format" to fix')
return False
return True
def lint(builder: worker.blender.CodeBuilder) -> None:
ok = make_format(builder)
if not ok:
sys.exit(1)


@@ -0,0 +1,114 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import pathlib
import zipfile
import worker.utils
def pack(
# Version string in the form 2.83.3.0; this is used in the Store package name
version: str,
# Input file path
input_file_path: pathlib.Path,
# A string in the form of 'CN=PUBLISHER'
publisher: str,
# If set, this MSIX is for an LTS release
lts: bool = False,
# If set, remove the Content folder if it already exists
overwrite: bool = False,
# Don't actually execute commands
dry_run: bool = False,
) -> pathlib.Path:
LTSORNOT = ""
PACKAGETYPE = ""
if lts:
versionparts = version.split(".")
LTSORNOT = f" {versionparts[0]}.{versionparts[1]} LTS"
PACKAGETYPE = f"{versionparts[0]}.{versionparts[1]}LTS"
output_package_file_name = f"{input_file_path.stem}.msix"
output_package_file_path = pathlib.Path(".", output_package_file_name)
content_folder = pathlib.Path(".", "Content")
content_blender_folder = pathlib.Path(content_folder, "Blender")
content_assets_folder = pathlib.Path(content_folder, "Assets")
assets_original_folder = pathlib.Path(".", "Assets")
pri_config_file = pathlib.Path(".", "priconfig.xml")
pri_resources_file = pathlib.Path(content_folder, "resources.pri")
pri_command = [
"makepri",
"new",
"/pr",
f"{content_folder.absolute()}",
"/cf",
f"{pri_config_file.absolute()}",
"/of",
f"{pri_resources_file.absolute()}",
]
msix_command = [
"makeappx",
"pack",
"/h",
"sha256",
"/d",
f"{content_folder.absolute()}",
"/p",
f"{output_package_file_path.absolute()}",
]
if overwrite:
if content_folder.joinpath("Assets").exists():
worker.utils.remove_dir(content_folder)
content_folder.mkdir(exist_ok=True)
worker.utils.copy_dir(assets_original_folder, content_assets_folder)
manifest_text = pathlib.Path("AppxManifest.xml.template").read_text()
manifest_text = manifest_text.replace("[VERSION]", version)
manifest_text = manifest_text.replace("[PUBLISHER]", publisher)
manifest_text = manifest_text.replace("[LTSORNOT]", LTSORNOT)
manifest_text = manifest_text.replace("[PACKAGETYPE]", PACKAGETYPE)
pathlib.Path(content_folder, "AppxManifest.xml").write_text(manifest_text)
worker.utils.info(
f"Extracting files from [{input_file_path}] to [{content_blender_folder.absolute()}]"
)
# Extract the files from the ZIP archive, but skip the leading part of paths
# in the ZIP. We want to write the files to the content_blender_folder where
# blender.exe ends up as ./Content/Blender/blender.exe, and not
# ./Content/Blender/blender-2.83.3-windows64/blender.exe
with zipfile.ZipFile(input_file_path, "r") as blender_zip:
for entry in blender_zip.infolist():
if entry.is_dir():
continue
entry_location = pathlib.Path(entry.filename)
target_location = content_blender_folder.joinpath(*entry_location.parts[1:])
pathlib.Path(target_location.parent).mkdir(parents=True, exist_ok=True)
extracted_entry = blender_zip.read(entry)
target_location.write_bytes(extracted_entry)
worker.utils.info("... extraction complete.")
worker.utils.info("Generating Package Resource Index (PRI) file")
worker.utils.call(pri_command, dry_run=dry_run)
worker.utils.info(f"Creating MSIX package using command: {' '.join(msix_command)}")
# Remove MSIX file if it already exists. Otherwise the MakeAppX tool
# will hang.
worker.utils.remove_file(output_package_file_path)
worker.utils.call(msix_command, dry_run=dry_run)
if dry_run:
output_package_file_path.write_text("Dry run dummy package file")
worker.utils.remove_dir(content_folder)
worker.utils.info("Done.")
return output_package_file_path
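# Hypothetical usage (all values illustrative):
#   pack("4.2.3.0", pathlib.Path("blender-4.2.3-windows-x64.zip"),
#        "CN=SomePublisher", lts=True, overwrite=True, dry_run=True)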


@@ -0,0 +1,357 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
# Runs on buildbot worker, creating a release package using the build
# system and zipping it into buildbot_upload.zip. This is then uploaded
# to the master in the next buildbot step.
import hashlib
import json
import os
import sys
import pathlib
import tarfile
import worker.blender
import worker.utils
import worker.blender.sign
import worker.blender.bundle_dmg
import worker.blender.version
# SemVer based file naming
def get_package_name(builder: worker.blender.CodeBuilder) -> str:
version_info = worker.blender.version.VersionInfo(builder)
# For release branch we will trim redundant info
branch_id = (
builder.branch_id.replace("/", "-")
.replace(".", "")
.replace("blender-", "")
.replace("-release", "")
)
package_name = "bpy" if builder.python_module else "blender"
package_name += f"-{version_info.version}"
package_name += f"-{version_info.risk_id}"
package_name += f"+{branch_id}"
if builder.patch_id:
if builder.patch_id.startswith("D"):
package_name += f"-{builder.patch_id}"
else:
package_name += f"-PR{builder.patch_id}"
package_name += f".{version_info.hash}"
package_name += f"-{builder.platform}"
package_name += f".{builder.architecture}"
package_name += f"-{builder.build_configuration}"
return package_name
# Generate .sha256 file next to the package
def generate_file_hash(package_file_path: pathlib.Path) -> None:
hash_algorithm = hashlib.sha256()
mem_array = bytearray(128 * 1024)
mem_view = memoryview(mem_array)
with open(package_file_path, "rb", buffering=0) as f:
while True:
# https://github.com/python/typeshed/issues/2166
n = f.readinto(mem_view) # type: ignore
if not n:
break
hash_algorithm.update(mem_view[:n])
hash_file_path = (package_file_path.parent) / (package_file_path.name + ".sha256")
hash_text = hash_algorithm.hexdigest()
hash_file_path.write_text(hash_text)
worker.utils.info(f"Generated hash [{hash_file_path}]")
print(hash_text)
# tar cf archive.tar test.c --owner=0 --group=0
def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> None:
# One extra to remove leading os.sep when cleaning root for package_root
ln = len(str(src)) + 1
flist = list()
# Create list of tuples containing file and archive name
for root, dirs, files in os.walk(src):
package_root = os.path.join(package_name, root[ln:])
flist.extend(
[(os.path.join(root, file), os.path.join(package_root, file)) for file in files]
)
# Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
# user compiled the package. If root then unpacks it to /usr/local/ you get
# a security issue.
def _fakeroot(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
tarinfo.gid = 0
tarinfo.gname = "root"
tarinfo.uid = 0
tarinfo.uname = "root"
return tarinfo
# Silence false positive mypy error.
package = tarfile.open(dest, "w:xz", preset=6) # type: ignore[call-arg]
for entry in flist:
worker.utils.info(f"Adding [{entry[0]}] to archive [{entry[1]}]")
package.add(entry[0], entry[1], recursive=False, filter=_fakeroot)
package.close()
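# Net effect roughly matches the tar invocation noted above, with every entry
# re-rooted under <package_name>/, owned by root:root, and compressed as .xz.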
def cleanup_files(dirpath: pathlib.Path, extension: str) -> None:
if dirpath.exists():
for filename in os.listdir(dirpath):
filepath = pathlib.Path(os.path.join(dirpath, filename))
if filepath.is_file() and filename.endswith(extension):
worker.utils.remove_file(filepath)
def pack_mac(builder: worker.blender.CodeBuilder) -> None:
version_info = worker.blender.version.VersionInfo(builder)
os.chdir(builder.build_dir)
cleanup_files(builder.package_dir, ".dmg")
package_name = get_package_name(builder)
package_file_name = package_name + ".dmg"
package_file_path = builder.package_dir / package_file_name
applescript_file_path = pathlib.Path(__file__).parent.resolve() / "blender.applescript"
background_image_file_path = builder.blender_dir / "release" / "darwin" / "background.tif"
worker.blender.bundle_dmg.bundle(
builder.install_dir, package_file_path, applescript_file_path, background_image_file_path
)
# Sign
worker.blender.sign.sign_darwin_files(builder, [package_file_path], "entitlements.plist")
# Notarize
worker_config = builder.get_worker_config()
team_id = worker_config.sign_code_darwin_team_id
apple_id = worker_config.sign_code_darwin_apple_id
keychain_profile = worker_config.sign_code_darwin_keychain_profile
timeout = "30m"
if builder.service_env_id == "LOCAL" and not apple_id:
worker.utils.info("Skipping notarization without Apple ID in local build")
return
# Upload file and wait for completion.
notarize_cmd = [
"xcrun",
"notarytool",
"submit",
package_file_path,
"--apple-id",
worker.utils.HiddenArgument(apple_id),
"--keychain-profile",
worker.utils.HiddenArgument(keychain_profile),
"--team-id",
worker.utils.HiddenArgument(team_id),
"--timeout",
timeout,
"--wait",
"--output-format",
"json",
]
request = worker.utils.check_output(notarize_cmd)
request_data = json.loads(request)
request_id = request_data["id"]
request_status = request_data["status"]
# Show logs
worker.utils.call(
["xcrun", "notarytool", "log", "--keychain-profile", keychain_profile, request_id],
retry_count=5,
retry_wait_time=10.0,
)
# Failed?
if request_status != "Accepted":
raise Exception("Notarization failed, aborting")
# Staple it
worker.utils.call(["xcrun", "stapler", "staple", package_file_path])
generate_file_hash(package_file_path)
def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None:
os.chdir(builder.build_dir)
if pack_format == "msi":
cpack_type = "WIX"
else:
cpack_type = "ZIP"
package_extension = pack_format
cleanup_files(builder.package_dir, f".{package_extension}")
script_folder_path = pathlib.Path(os.path.realpath(__file__)).parent
# Will take care of codesigning and correcting the folder name in the zip.
#
# Code signing is done as part of the INSTALL target, which makes it possible to sign
# files which are aimed into a bundle and coming from a non-signed source (such as
# the libraries SVN).
#
# This is achieved by specifying cpack_post.cmake as a post-install script run
# by cpack. cpack_post.py takes care of the actual code signing.
post_script_file_path = script_folder_path / "cpack_post.cmake"
app_id = "Blender"
final_package_name = get_package_name(builder)
# MSI needs the app id for the Windows menu folder name.
# It will also fail if given anything else.
cpack_package_name = app_id if pack_format == "msi" else final_package_name
cmake_cmd = [
"cmake",
f"-DCPACK_PACKAGE_NAME:STRING={cpack_package_name}",
f"-DCPACK_OVERRIDE_PACKAGENAME:STRING={cpack_package_name}",
# Only works with ZIP, ignored by MSI
# f'-DARCHIVE_FILE:STRING={package_name}',
# f'-DCPACK_PACKAGE_FILE_NAME:STRING={cpack_package_name}',
f"-DCMAKE_INSTALL_PREFIX:PATH={builder.install_dir}",
f"-DPOSTINSTALL_SCRIPT:PATH={post_script_file_path}",
".",
]
builder.call(cmake_cmd)
worker.utils.info("Packaging Blender files")
cpack_cmd = [
"cpack",
"-G",
cpack_type,
# '--verbose',
"--trace-expand",
"-C",
builder.build_configuration,
"-B",
str(builder.package_dir), # CPACK_PACKAGE_DIRECTORY
"-P",
cpack_package_name,
]
builder.call(cpack_cmd)
final_package_file_name = f"{final_package_name}.{package_extension}"
final_package_file_path = builder.package_dir / final_package_file_name
# HACK: Rename files correctly; cpack appends `-windows64` with no option to rename
bogus_cpack_file_path = (
builder.package_dir / f"{cpack_package_name}-windows64.{package_extension}"
)
if pack_format == "zip":
if bogus_cpack_file_path.exists():
worker.utils.info(f"Removing bogus file [{bogus_cpack_file_path}]")
worker.utils.remove_file(bogus_cpack_file_path)
source_cpack_file_path = (
builder.package_dir
/ "_CPack_Packages"
/ "Windows"
/ "ZIP"
/ f"{final_package_file_name}"
)
worker.utils.info(f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]")
os.rename(source_cpack_file_path, final_package_file_path)
else:
os.rename(bogus_cpack_file_path, final_package_file_path)
version_info = worker.blender.version.VersionInfo(builder)
description = f"Blender {version_info.version}"
worker.blender.sign.sign_windows_files(builder.service_env_id, [final_package_file_path],
description=description)
generate_file_hash(final_package_file_path)
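# Package a Linux build as a tar.xz archive, stripping binaries first.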
def pack_linux(builder: worker.blender.CodeBuilder) -> None:
blender_executable = builder.install_dir / "blender"
version_info = worker.blender.version.VersionInfo(builder)
# Strip all unused symbols from the binaries
worker.utils.info("Stripping binaries")
builder.call(["strip", "--strip-all", blender_executable])
worker.utils.info("Stripping python")
# Python is expected in a folder named after the short version (e.g. 3.0), but for now it is in 3.00
py_target = builder.install_dir / version_info.short_version
if not os.path.exists(py_target):
# Support the older zero-padded format and the current issue with 3.00
py_target = builder.install_dir / ("%d.%02d" % (version_info.major, version_info.minor))
worker.utils.call(["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"])
package_name = get_package_name(builder)
package_file_name = f"{package_name}.tar.xz"
package_file_path = builder.package_dir / package_file_name
worker.utils.info(f"Creating [{package_file_path}] archive")
os.makedirs(builder.package_dir, exist_ok=True)
create_tar_xz(builder.install_dir, package_file_path, package_name)
generate_file_hash(package_file_path)
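# Build the bpy Python wheel and wrap it in a zip for upload.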
def pack_python_module(builder: worker.blender.CodeBuilder) -> None:
cleanup_files(builder.package_dir, ".whl")
cleanup_files(builder.package_dir, ".zip")
package_name = get_package_name(builder) + ".zip"
package_filepath = builder.package_dir / package_name
pack_script = builder.blender_dir / "build_files" / "utils" / "make_bpy_wheel.py"
# Make wheel
worker.utils.info("Packaging Python Wheel")
cmd = [sys.executable, pack_script, builder.install_dir]
cmd += ["--build-dir", builder.build_dir]
cmd += ["--output-dir", builder.package_dir]
builder.call(cmd)
# Pack the wheel in a zip until the pipeline and www can deal with .whl files.
import zipfile
with zipfile.ZipFile(package_filepath, "w") as zipf:
for whl_name in os.listdir(builder.package_dir):
if whl_name.endswith(".whl"):
whl_filepath = builder.package_dir / whl_name
zipf.write(whl_filepath, arcname=whl_name)
cleanup_files(builder.package_dir, ".whl")
generate_file_hash(package_filepath)
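# Entry point: create a clean package directory and package the build for the current platform.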
def pack(builder: worker.blender.CodeBuilder) -> None:
builder.setup_build_environment()
# Create clean package directory
worker.utils.remove_dir(builder.package_dir)
os.makedirs(builder.package_dir, exist_ok=True)
# Make sure install directory always exists
os.makedirs(builder.install_dir, exist_ok=True)
if builder.python_module:
pack_python_module(builder)
elif builder.platform == "darwin":
pack_mac(builder)
elif builder.platform == "windows":
pack_win(builder, "zip")
if builder.track_id not in ["vdev", "vexp"]:
pack_win(builder, "msi")
elif builder.platform == "linux":
pack_linux(builder)

View file

@ -0,0 +1,195 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import pathlib
import sys
from typing import Optional, Sequence
import worker.blender
import worker.utils
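# Sign a list of files via the codesign server, in chunks to avoid overloading it.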
def sign_windows_files(
service_env_id: str,
file_paths: Sequence[pathlib.Path],
description: Optional[str] = None,
certificate_id: str = "",
) -> None:
import conf.worker
worker_config = conf.worker.get_config(service_env_id)
# TODO: Rotate to the next server if the first one fails
timeserver = worker_config.sign_code_windows_time_servers[0]
server_url = worker_config.sign_code_windows_server_url
if not certificate_id:
certificate_id = worker_config.sign_code_windows_certificate
dry_run = False
if service_env_id == "LOCAL" and not certificate_id:
worker.utils.warning("Performing dry run on LOCAL service environment")
dry_run = True
cmd_args = [
sys.executable,
"C:\\tools\\codesign.py",
"--server-url",
worker.utils.HiddenArgument(server_url),
]
if description:
cmd_args += ["--description", description]
cmd: worker.utils.CmdSequence = cmd_args
# Signing one file at a time causes a stampede on the servers, resulting in blocking.
# Instead, sign in chunks of multiple files.
chunk_size = 25 # Number of files to sign at a time
retry_count = 3
for i in range(0, len(file_paths), chunk_size):
file_chunks = file_paths[i : i + chunk_size]
worker.utils.call(list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run)
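# Collect and sign all executables and libraries in a Windows install folder.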
def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None:
# TODO: Why use a junction? Is there some failure with long file paths?
# worker.utils.info("Creating building link")
# temp_build_root_path = pathlib.Path("C:/BlenderTemp")
# os.makedirs(temp_build_root_path, exist_ok=True)
# orig_install_path = install_path
# install_path = temp_build_root_path / install_path.name
try:
# TODO
# New-Item -type Junction -path install_path -value orig_install_path
worker.utils.info("Collecting files to process")
file_paths = list(install_path.glob("*.exe"))
file_paths += list(install_path.glob("*.dll"))
file_paths += list(install_path.glob("*.pyd"))
file_paths = [f for f in file_paths if "blender.crt" not in str(f)]
for f in file_paths:
print(f)
sign_windows_files(service_env_id, file_paths)
finally:
# worker.utils.info(f"Removing temporary folder {temp_build_root_path}")
# worker.utils.remove_dir(temp_build_root_path, retry_count=5, retry_wait_time=5.0)
# TODO: is this really necessary?
# worker.utils.info("Flushing volume cache...")
# Write-VolumeCache -DriveLetter C
# core_shell_retry_command -retry_count 5 -delay_in_milliseconds 1000 -script_block `
# worker.utils.info("Junction information...")
# junction = Get-Item -Path install_path
# worker.utils.info(junction | Format-Table)
# worker.utils.info("Attempting to remove...")
# junction.Delete()
# worker.utils.info("Junction deleted!")
pass
worker.utils.info("End of codesign steps")
def sign_darwin_files(
builder: worker.blender.CodeBuilder,
file_paths: Sequence[pathlib.Path],
entitlements_file_name: str
) -> None:
entitlements_path = builder.code_path / "release" / "darwin" / entitlements_file_name
if not entitlements_path.exists():
raise Exception(f"File {entitlements_path} not found, aborting")
worker_config = builder.get_worker_config()
certificate_id = worker_config.sign_code_darwin_certificate
dry_run = False
if builder.service_env_id == "LOCAL" and not certificate_id:
worker.utils.warning("Performing dry run on LOCAL service environment")
dry_run = True
keychain_password = worker_config.darwin_keychain_password(builder.service_env_id)
cmd: worker.utils.CmdSequence = [
"security",
"unlock-keychain",
"-p",
worker.utils.HiddenArgument(keychain_password),
]
worker.utils.call(cmd, dry_run=dry_run)
for file_path in file_paths:
if file_path.is_dir() and file_path.suffix != ".app":
continue
# Remove signature
if file_path.suffix != ".dmg":
worker.utils.call(
["codesign", "--remove-signature", file_path], exit_on_error=False, dry_run=dry_run
)
# Add signature
worker.utils.call(
[
"codesign",
"--force",
"--timestamp",
"--options",
"runtime",
f"--entitlements={entitlements_path}",
"--sign",
certificate_id,
file_path,
],
retry_count=3,
dry_run=dry_run,
)
if file_path.suffix == ".app":
worker.utils.info(f"Vaildating app bundle {file_path}")
worker.utils.call(
["codesign", "-vvv", "--deep", "--strict", file_path], dry_run=dry_run
)
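# Sign all executables, plugins and libraries in the Blender.app bundle, then the bundle itself.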
def sign_darwin(builder: worker.blender.CodeBuilder) -> None:
bundle_path = builder.install_dir / "Blender.app"
# Executables
sign_path = bundle_path / "Contents" / "MacOS"
worker.utils.info(f"Collecting files to process in {sign_path}")
sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist")
# Thumbnailer app extension.
thumbnailer_appex_path = bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex"
if thumbnailer_appex_path.exists():
sign_path = thumbnailer_appex_path / "Contents" / "MacOS"
worker.utils.info(f"Collecting files to process in {sign_path}")
sign_darwin_files(builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist")
# Shared libraries and Python
sign_path = bundle_path / "Contents" / "Resources"
worker.utils.info(f"Collecting files to process in {sign_path}")
file_paths = list(
set(sign_path.rglob("*.dylib"))
| set(sign_path.rglob("*.so"))
| set(sign_path.rglob("python3.*"))
)
sign_darwin_files(builder, file_paths, "entitlements.plist")
# Bundle
worker.utils.info(f"Signing app bundle {bundle_path}")
sign_darwin_files(builder, [bundle_path], "entitlements.plist")
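# Entry point: dispatch to the platform specific code signing.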
def sign(builder: worker.blender.CodeBuilder) -> None:
builder.setup_build_environment()
if builder.platform == "windows":
sign_windows(builder.service_env_id, builder.install_dir)
elif builder.platform == "darwin":
sign_darwin(builder)
else:
worker.utils.info("No code signing to be done on this platform")

View file

@ -0,0 +1,60 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import os
import shutil
from typing import List
import worker.blender
import worker.blender.pack
import worker.blender.compile
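# Build the ctest argument list; tests only run in parallel when GPU tests are disabled.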
def get_ctest_arguments(builder: worker.blender.CodeBuilder) -> List[str]:
args = ["--output-on-failure"]
# GPU tests are currently slow and can cause timeouts.
if not builder.needs_gpu_tests:
args += ["--parallel", "4"]
args += ["-C", worker.blender.compile.get_cmake_build_type(builder)]
return args
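# Copy and archive test artifacts so they can be uploaded for inspection.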
def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> None:
build_tests_dir = builder.build_dir / "tests"
package_tests_dir = builder.package_dir / "tests"
if not build_tests_dir.exists():
return
os.makedirs(package_tests_dir, exist_ok=True)
# Package the tests folder on failure so it can be uploaded
if not success:
package_filename = "tests-" + worker.blender.pack.get_package_name(builder)
package_filepath = package_tests_dir / package_filename
shutil.copytree(build_tests_dir, package_filepath)
shutil.make_archive(str(package_filepath), "zip", package_tests_dir, package_filename)
shutil.rmtree(package_filepath)
# Always upload the unpacked folder for main and release tracks
# when using GPU tests. This is useful for debugging GPU
# differences.
if builder.track_id != "vexp" and builder.needs_gpu_tests:
branch = builder.branch_id.replace("blender-", "").replace("-release", "")
name = f"{branch}-{builder.platform}-{builder.architecture}"
shutil.copytree(build_tests_dir, package_tests_dir / name)
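# Run the test suite with ctest, packaging test artifacts afterwards.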
def test(builder: worker.blender.CodeBuilder) -> None:
builder.setup_build_environment()
os.chdir(builder.build_dir)
success = False
try:
builder.call(["ctest"] + get_ctest_arguments(builder))
success = True
finally:
package_for_upload(builder, success)

View file

@ -0,0 +1,53 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import os
import sys
import worker.blender
import worker.utils
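# Remove build, install and package folders left over from previous runs.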
def _clean_folders(builder: worker.blender.CodeBuilder) -> None:
# Delete build folders.
if builder.needs_full_clean:
worker.utils.remove_dir(builder.build_dir)
else:
worker.utils.remove_dir(builder.build_dir / "Testing")
worker.utils.remove_dir(builder.build_dir / "bin" / "tests")
# Delete install and packaging folders
worker.utils.remove_dir(builder.install_dir)
worker.utils.remove_dir(builder.package_dir)
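# Clean stale folders, then update source code and libraries via make_update.py.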
def update(builder: worker.blender.CodeBuilder) -> None:
_clean_folders(builder)
builder.update_source()
os.chdir(builder.code_path)
make_update_path = builder.code_path / "build_files" / "utils" / "make_update.py"
make_update_text = make_update_path.read_text()
if "def svn_update" in make_update_text:
worker.utils.error("Can't build branch or pull request that uses Subversion libraries.")
worker.utils.error("Merge with latest main or release branch to use Git LFS libraries.")
sys.exit(1)
# Run make update
cmd = [
sys.executable,
make_update_path,
"--no-blender",
"--use-linux-libraries",
"--use-tests",
"--architecture",
builder.architecture,
]
if builder.track_id not in ("v360", "vexp"):
cmd += ["--prune-destructive"]
worker.utils.call(cmd)

View file

@ -0,0 +1,52 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import pathlib
import re
import worker.blender
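# Parses Blender version information from the source and build headers.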
class VersionInfo:
def __init__(self, builder: worker.blender.CodeBuilder):
# Get version information
buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h"
blender_h = (
builder.blender_dir / "source" / "blender" / "blenkernel" / "BKE_blender_version.h"
)
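# BLENDER_VERSION encodes major and minor as major * 100 + minor (e.g. 306 for 3.6).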
version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION"))
version_number_patch = int(self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH"))
self.major, self.minor, self.patch = (
version_number // 100,
version_number % 100,
version_number_patch,
)
if self.major >= 3:
self.short_version = "%d.%d" % (self.major, self.minor)
self.version = "%d.%d.%d" % (self.major, self.minor, self.patch)
else:
self.short_version = "%d.%02d" % (self.major, self.minor)
self.version = "%d.%02d.%d" % (self.major, self.minor, self.patch)
self.version_cycle = self._parse_header_file(blender_h, "BLENDER_VERSION_CYCLE")
if buildinfo_h.exists():
self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1]
else:
self.hash = ""
self.risk_id = self.version_cycle.replace("release", "stable").replace("rc", "candidate")
self.is_development_build = self.version_cycle == "alpha"
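# Return the raw value of a #define from the given header file.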
def _parse_header_file(self, filename: pathlib.Path, define: str) -> str:
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
with open(filename, "r") as file:
for line in file:
match = regex.match(line)
if match:
return match.group(1)
raise Exception(f"Failed to parse {filename.name} header for {define}")