# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
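"""Buildbot worker step that runs Blender's performance benchmark suite and
packages the results (per-revision JSON, rolling summary and HTML report) for upload."""
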
import json
import os
import pathlib
import urllib.request
import sys

import conf.worker
import worker.blender
import worker.utils


def create_upload(
    builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
) -> None:
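    """Package benchmark results for upload: copy this revision's results.json,
    merge it into the rolling summary.json, and generate an HTML report."""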
    # Create package directory.
    branch = builder.branch_id.replace("blender-", "").replace("-release", "")
    name = f"{branch}-{builder.platform}-{builder.architecture}"
    package_dir = builder.package_dir / name
    worker.utils.remove_dir(package_dir)
    os.makedirs(package_dir, exist_ok=True)

    # Fetch existing summary
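    # In the LOCAL environment the previous summary is copied from the local
    # download folder; UATEST and PROD fetch it over HTTPS. A missing summary is
    # not fatal, the history simply starts fresh.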
    worker_config = conf.worker.get_config(builder.service_env_id)
    base_urls = {
        "LOCAL": str(worker_config.buildbot_download_folder),
        "UATEST": "https://builder.uatest.blender.org/download",
        "PROD": "https://builder.blender.org/download",
    }
    base_url = base_urls[builder.service_env_id]

    summary_json_url = f"{base_url}/daily/benchmarks/{name}/summary.json"
    summary_json_path = package_dir / "summary.json"
    try:
        if builder.service_env_id == "LOCAL":
            worker.utils.copy_file(pathlib.Path(summary_json_url), summary_json_path)
        else:
            urllib.request.urlretrieve(summary_json_url, summary_json_path)
    except Exception as e:
        error_msg = str(e)
        worker.utils.warning(f"Could not retrieve benchmark summary.json: {error_msg}")

    # Create json files in package directory.
    results_json_path = benchmark_path / "results.json"
    revision_json_path = package_dir / f"{revision}.json"
    worker.utils.copy_file(results_json_path, revision_json_path)

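    # Merge this run's results into the rolling summary used for the report.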
    summary_json = []
    if summary_json_path.exists():
        summary_json = json.loads(summary_json_path.read_text())
    summary_json += json.loads(results_json_path.read_text())
    summary_json_path.write_text(json.dumps(summary_json, indent=2))

    # Create html file in package directory.
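    # The "graph" command of tests/performance/benchmark.py turns the accumulated
    # summary.json into report.html.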
    report_html_path = package_dir / "report.html"
    cmd = [
        sys.executable,
        builder.code_path / "tests" / "performance" / "benchmark.py",
        "graph",
        summary_json_path,
        "-o",
        report_html_path,
    ]
    worker.utils.call(cmd)


def benchmark(builder: worker.blender.CodeBuilder) -> None:
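    """Run the performance benchmark suite on the current revision and package
    the results for upload."""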
    # Parameters
    os.chdir(builder.code_path)
    revision = worker.utils.check_output(["git", "rev-parse", "HEAD"])
    revision = revision[:12]
    blender_command = builder.blender_command_path()
    gpu_device = "METAL" if builder.platform == "darwin" else "OPTIX"
    background = False if builder.platform == "darwin" else True
    worker.utils.info(f"Benchmark revision {revision}, GPU device {gpu_device}")

    # Create clean benchmark folder
    benchmark_path = builder.track_path / "benchmark" / "default"
    worker.utils.remove_dir(benchmark_path)
    os.makedirs(benchmark_path, exist_ok=True)

    # Initialize configuration
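    # config.py is read by tests/performance/benchmark.py: it selects the devices
    # to benchmark, whether Blender runs in background mode, and maps the short
    # revision hash to the Blender executable under test.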
    config_py_path = benchmark_path / "config.py"
    config_py_text = f"""
devices = ["CPU", "{gpu_device}_0"]
background = {background}
builds = {{"{revision}": "{blender_command}"}}
benchmark_type = "time_series"
"""
    config_py_path.write_text(config_py_text)

    # Checkout benchmark files
    tests_benchmarks_path = builder.code_path / "tests" / "benchmarks"
    if not tests_benchmarks_path.exists():
        benchmarks_url = "https://projects.blender.org/blender/blender-benchmarks.git"
        worker.utils.call(["git", "clone", benchmarks_url, tests_benchmarks_path])

    # Run benchmark
    cmd = [
        sys.executable,
        builder.code_path / "tests" / "performance" / "benchmark.py",
        "list",
    ]
    worker.utils.call(cmd)

    cmd = [
        sys.executable,
        builder.code_path / "tests" / "performance" / "benchmark.py",
        "run",
        "default",
    ]
    exit_code = worker.utils.call(cmd, exit_on_error=False)

    # Write results to be uploaded
    create_upload(builder, benchmark_path, revision)
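    # Propagate the benchmark's exit status so a failed run is reported by this step.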
    sys.exit(exit_code)