# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

from functools import partial
import pathlib
import random

import buildbot.plugins

from buildbot.plugins import steps as plugins_steps
from buildbot.plugins import schedulers as plugins_schedulers

import conf.branches
import conf.machines
import conf.worker

import pipeline.common
import gitea.blender
import gitea.reporter

# Timeouts.
default_step_timeout_in_seconds = 10 * 60
# TODO: Compile step needs more time because of linking on Windows.
compile_code_step_timeout_in_seconds = 10 * 60
compile_gpu_step_timeout_in_seconds = 1.5 * 60 * 60

tree_stable_timer_in_seconds = 15 * 60

package_step_timeout_in_seconds = 20 * 60

# Build steps.
code_pipeline_general_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]

code_pipeline_daily_step_names = code_pipeline_general_step_names

code_pipeline_patch_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]

code_pipeline_experimental_step_names = code_pipeline_general_step_names

pipeline_types_step_names = {
    "daily": code_pipeline_daily_step_names,
    "patch": code_pipeline_patch_step_names,
    "experimental": code_pipeline_experimental_step_names,
}

code_pipeline_lint_step_names = [
    "configure-machine",
    "update-code",
    "lint-code",
]

# Steps for testing.
code_pipeline_test_step_names = [
    "test-code",
]

# Steps for package delivery.
code_delivery_step_names = [
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
]

# Steps skipped for Python module.
code_python_module_skip_test_names = ["sign-code-binaries"]


# Tracks.
code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
code_track_ids = list(code_tracked_branch_ids.keys())
code_all_platform_architectures = conf.branches.code_all_platform_architectures
code_official_platform_architectures = (
    conf.branches.code_official_platform_architectures
)

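# Map each track to the pipeline types it runs. The loop below builds a layout
# roughly like {"vdev": ["daily"], "vexp": ["experimental", "patch"], ...}
# (track ids shown here are illustrative; the real set comes from conf.branches).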
code_track_pipeline_types = {}
track_properties = {}
for track, branch in code_tracked_branch_ids.items():
    if track == "vdev":
        code_track_pipeline_types[track] = ["daily"]
    elif track == "vexp":
        code_track_pipeline_types[track] = ["experimental", "patch"]
    else:
        code_track_pipeline_types[track] = ["daily"]

    # Track properties.
    track_properties[track] = [
        buildbot.plugins.util.ChoiceStringParameter(
            name="platform_architectures",
            label="Platforms:",
            required=True,
            choices=code_all_platform_architectures[track],
            multiple=True,
            strict=True,
            default=code_official_platform_architectures[track],
        ),
    ]

# Scheduler properties.
scheduler_properties_common = [
    buildbot.plugins.util.BooleanParameter(
        name="python_module",
        label="Python module -> build bpy module instead of Blender",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_full_clean",
        label="Full clean -> removes build workspace on machine",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_package_delivery",
        label="Package delivery -> push files to configured services",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_binaries",
        label="GPU binaries -> build Cycles GPU kernels",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_tests",
        label="GPU tests -> run EEVEE, Viewport and Cycles GPU tests",
        required=True,
        strict=True,
        default=False,
    ),
]

# code-daily properties.
scheduler_properties_daily = scheduler_properties_common

# code-experimental properties.
scheduler_properties_experimental = [
    buildbot.plugins.util.StringParameter(
        name="override_branch_id",
        label="Branch:",
        required=True,
        size=80,
        regex=r"^[a-zA-Z0-9][A-Za-z0-9\._-]*$",
        default="",
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
]
scheduler_properties_experimental += scheduler_properties_common


# code-patch properties.
scheduler_properties_patch = [
    buildbot.plugins.util.StringParameter(
        name="patch_id", label="Patch Id:", required=True, size=80, default=""
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.StringParameter(
        name="pull_revision",
        label="Pull Revision:",
        required=False,
        hide=True,
        size=80,
        default="",
    ),
]

scheduler_properties_patch += scheduler_properties_common

scheduler_properties = {
    "code-daily": scheduler_properties_daily,
    "code-experimental": scheduler_properties_experimental,
    "code-patch": scheduler_properties_patch,
}


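# Renderer that assembles the command-line arguments passed to the worker-side
# code.py script for a single step. For example, a patch build of the
# "compile-gpu" step would produce arguments roughly like (illustrative values
# only):
#   --architecture x86_64 --patch-id 123456 --commit-id HEAD
#   --build-configuration asserts --needs-gpu-binaries compile-gpu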
@buildbot.plugins.util.renderer
def create_code_worker_command_args(
    props, ENVIRONMENT, track_id, pipeline_type, step_name
):
    commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
    patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
    override_branch_id = pipeline.common.fetch_property(
        props, key="override_branch_id", default=""
    )
    python_module = pipeline.common.fetch_property(
        props, key="python_module", default=False
    )
    needs_gpu_tests = pipeline.common.fetch_property(
        props, key="needs_gpu_tests", default=False
    )
    needs_gpu_binaries = pipeline.common.fetch_property(
        props, key="needs_gpu_binaries", default=False
    )
    build_configuration = pipeline.common.fetch_property(
        props, key="build_configuration", default="release"
    )
    needs_full_clean = pipeline.common.fetch_property(
        props, key="needs_full_clean", default="false"
    )
    needs_full_clean = needs_full_clean in ["true", True]
    needs_package_delivery = pipeline.common.fetch_property(
        props, key="needs_package_delivery", default="false"
    )
    needs_package_delivery = needs_package_delivery in ["true", True]

    # Auto-enable asserts when not using package delivery. Only supported in 4.1+.
    if track_id not in ("v360",):
        if build_configuration == "release" and not needs_package_delivery:
            build_configuration = "asserts"

    platform_id, architecture = pipeline.common.fetch_platform_architecture(props)

    args = []

    if architecture:
        args += ["--architecture", architecture]

    if pipeline_type == "patch":
        # PowerShell doesn't like '#' in string arguments, so strip it.
        args += ["--patch-id", patch_id.lstrip("#")]
    elif pipeline_type == "experimental":
        args += ["--branch-id", override_branch_id]

    args += ["--commit-id", commit_id]
    args += ["--build-configuration", build_configuration]

    if python_module:
        args += ["--python-module"]
    if needs_full_clean:
        args += ["--needs-full-clean"]
    if step_name in ["compile-gpu", "compile-install", "test-code"]:
        if needs_package_delivery or needs_gpu_binaries:
            args += ["--needs-gpu-binaries"]
        if needs_gpu_tests:
            args += ["--needs-gpu-tests"]

    args += [step_name]

    return pipeline.common.create_worker_command("code.py", ENVIRONMENT, track_id, args)


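# doStepIf callback shared by the build steps: decides whether a step should run
# based on the build properties set by the coordinator (skip tests, package
# delivery, GPU binaries, Python module).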
def needs_do_code_pipeline_step(step):
    # Use this to test master steps only, otherwise we would be waiting for 30 minutes.
    needs_master_steps_only = False

    if needs_master_steps_only:
        is_master_step = step.name in pipeline.common.code_pipeline_master_step_names
        return is_master_step

    step.getWorkerName()

    is_package_delivery_step = (step.name in code_delivery_step_names) or (
        step.name in pipeline.common.code_pipeline_master_step_names
    )
    needs_package_delivery = step.getProperty("needs_package_delivery")
    needs_gpu_binaries = step.getProperty("needs_gpu_binaries")
    needs_skip_tests = step.getProperty("needs_skip_tests")

    python_module = step.getProperty("python_module")

    needs_do_it = True

    if step.name in code_pipeline_test_step_names:
        needs_do_it = not needs_skip_tests
    elif step.name == "compile-gpu":
        needs_do_it = needs_package_delivery or needs_gpu_binaries
    elif is_package_delivery_step:
        needs_do_it = needs_package_delivery

    if python_module and (step.name in code_python_module_skip_test_names):
        needs_do_it = False

    return needs_do_it


# Custom file upload that shows links to download files.
class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload):
    def uploadDone(self, result, source, masterdest):
        if not self.url:
            return

        name = pathlib.Path(source).name
        if name.endswith(".zip"):
            self.addURL(name, self.url + "/" + name)
        else:
            self.addURL(name, self.url + "/" + name + "/report.html")

    def allUploadsDone(self, result, sources, masterdest):
        return


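# Upload the packaged binaries from the worker's build_package directory to the
# master's download folder for this pipeline type.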
def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
    max_file_size_in_bytes = 500 * 1024 * 1024  # 500 MiB.
    worker_source_path = pathlib.Path(
        f"../../../../git/blender-{track_id}/build_package"
    )
    master_dest_path = pathlib.Path(
        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
    ).expanduser()

    return plugins_steps.MultipleFileUpload(
        name="deliver-code-binaries",
        maxsize=max_file_size_in_bytes,
        workdir=f"{worker_source_path}",
        glob=True,
        workersrcs=["*.*"],
        masterdest=f"{master_dest_path}",
        mode=0o644,
        url=None,
        description="running",
        descriptionDone="completed",
        doStepIf=needs_do_code_pipeline_step,
    )


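# Upload test results (zipped reports and per-branch artifacts) to the master's
# download folder; runs even when earlier steps failed so reports stay available.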
def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
    max_file_size_in_bytes = 500 * 1024 * 1024  # 500 MiB.
    worker_source_path = pathlib.Path(
        f"../../../../git/blender-{track_id}/build_package"
    )
    master_dest_path = pathlib.Path(
        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
    ).expanduser()

    tests_worker_source_path = worker_source_path / "tests"
    tests_master_dest_path = master_dest_path / "tests"
    tests_worker_srcs = ["tests-*.zip"]

    branch_id = code_tracked_branch_ids[track_id]
    if branch_id:
        branch_id = branch_id.replace("blender-", "").replace("-release", "")
        tests_worker_srcs.append(branch_id + "-*")

    return LinkMultipleFileUpload(
        name="deliver-test-results",
        maxsize=max_file_size_in_bytes,
        workdir=f"{tests_worker_source_path}",
        glob=True,
        workersrcs=tests_worker_srcs,
        masterdest=f"{tests_master_dest_path}",
        mode=0o644,
        url=f"../download/{pipeline_type}/tests",
        description="running",
        descriptionDone="completed",
        alwaysRun=True,
    )


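# nextWorker callback: picks a random compatible worker for the build request,
# preferring the GPU workers when GPU tests are requested.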
def next_worker_code(worker_names_gpu, builder, workers, request):
    # Use a GPU worker if needed and supported for this platform.
    # NVIDIA worker is currently reserved for GPU builds only.
    compatible_workers = []
    if request.properties.getProperty("needs_gpu_tests", False) and worker_names_gpu:
        for worker in workers:
            if worker.worker.workername in worker_names_gpu:
                compatible_workers.append(worker)
    else:
        for worker in workers:
            if "nvidia" not in worker.worker.workername:
                compatible_workers.append(worker)

    if not compatible_workers:
        return None

    return random.choice(compatible_workers)


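# Trigger step that only fires the schedulers matching the selected platform
# architectures; lint schedulers are always included.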
class PlatformTrigger(plugins_steps.Trigger):
    def getSchedulersAndProperties(self):
        schedulers = []

        platform_architectures = self.set_properties["platform_architectures"]

        for scheduler in self.schedulerNames:
            found = False
            if "lint" in scheduler:
                found = True
            for platform_architecture in platform_architectures:
                if platform_architecture in scheduler:
                    found = True

            if found:
                schedulers.append(
                    {
                        "sched_name": scheduler,
                        "props_to_set": self.set_properties,
                        "unimportant": False,
                    }
                )

        return schedulers


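# Build and return the builders and schedulers for the [code] pipeline of every
# tracked branch; the caller merges the returned lists into the master
# configuration (see the sketch at the end of this file).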
def populate(ENVIRONMENT):
    builders = []
    schedulers = []

    platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT)
    local_worker_names = conf.machines.fetch_local_worker_names()

    worker_config = conf.worker.get_config(ENVIRONMENT)

    needs_incremental_schedulers = ENVIRONMENT in ["PROD"]
    needs_nightly_schedulers = ENVIRONMENT in ["PROD"]

print("*** Creating [code] pipeline")
|
|
for track_id in code_track_ids:
|
|
pipeline_types = code_track_pipeline_types[track_id]
|
|
for pipeline_type in pipeline_types:
|
|
# Create steps.
|
|
step_names = pipeline_types_step_names[pipeline_type]
|
|
pipeline_build_factory = buildbot.plugins.util.BuildFactory()
|
|
|
|
print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps")
|
|
for step_name in step_names:
|
|
if step_name == "deliver-code-binaries":
|
|
step = create_deliver_code_binaries_step(
|
|
worker_config, track_id, pipeline_type
|
|
)
|
|
elif step_name == "deliver-test-results":
|
|
step = create_deliver_test_results_step(
|
|
worker_config, track_id, pipeline_type
|
|
)
|
|
else:
|
|
needs_halt_on_failure = True
|
|
if step_name in code_pipeline_test_step_names:
|
|
needs_halt_on_failure = track_id != "vexp"
|
|
|
|
step_timeout_in_seconds = default_step_timeout_in_seconds
|
|
if step_name == "compile-code":
|
|
step_timeout_in_seconds = compile_code_step_timeout_in_seconds
|
|
elif step_name == "compile-gpu":
|
|
step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds
|
|
|
|
step_command = create_code_worker_command_args.withArgs(
|
|
ENVIRONMENT, track_id, pipeline_type, step_name
|
|
)
|
|
|
|
step = buildbot.plugins.steps.ShellCommand(
|
|
name=step_name,
|
|
logEnviron=True,
|
|
haltOnFailure=needs_halt_on_failure,
|
|
timeout=step_timeout_in_seconds,
|
|
description="running",
|
|
descriptionDone="completed",
|
|
doStepIf=needs_do_code_pipeline_step,
|
|
command=step_command,
|
|
)
|
|
|
|
pipeline_build_factory.addStep(step)
|
|
|
|
            for master_step_name in pipeline.common.code_pipeline_master_step_names:
                master_step_command = (
                    pipeline.common.create_master_command_args.withArgs(
                        ENVIRONMENT,
                        track_id,
                        pipeline_type,
                        master_step_name,
                        single_platform=True,
                    )
                )

                # Master step to archive and purge builds.
                pipeline_build_factory.addStep(
                    plugins_steps.MasterShellCommand(
                        name=master_step_name,
                        logEnviron=False,
                        command=master_step_command,
                        description="running",
                        descriptionDone="completed",
                        doStepIf=needs_do_code_pipeline_step,
                    )
                )

            # Create lint pipeline
            pipeline_lint_factory = buildbot.plugins.util.BuildFactory()
            for step_name in code_pipeline_lint_step_names:
                step_command = create_code_worker_command_args.withArgs(
                    ENVIRONMENT, track_id, pipeline_type, step_name
                )

                pipeline_lint_factory.addStep(
                    buildbot.plugins.steps.ShellCommand(
                        name=step_name,
                        logEnviron=True,
                        haltOnFailure=True,
                        timeout=default_step_timeout_in_seconds,
                        description="running",
                        descriptionDone="completed",
                        command=step_command,
                    )
                )

            triggerable_scheduler_names = []
            trigger_factory = buildbot.plugins.util.BuildFactory()

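            # Worker pools are looked up by group id; the "<platform>-code" and
            # "<platform>-code-gpu" names below are expected to match the groups
            # defined in conf.machines.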
            # Create builders.
            for platform_architecture in code_all_platform_architectures[track_id]:
                print(
                    f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders"
                )

                worker_group_id = f"{platform_architecture}-code"
                worker_group_id_gpu = f"{platform_architecture}-code-gpu"

                pipeline_worker_names = platform_worker_names[worker_group_id]
                pipeline_worker_names_gpu = platform_worker_names[worker_group_id_gpu]
                if pipeline_worker_names:
                    # Only create the builders if the worker exists
                    pipeline_builder_name = (
                        f"{track_id}-code-{pipeline_type}-{platform_architecture}"
                    )
                    pipeline_builder_tags = pipeline_builder_name.split("-")

                    # Assigning different workers for different tracks, specifically Linux builders.
                    suitable_pipeline_worker_names = pipeline_worker_names
                    if (
                        platform_architecture == "linux-x86_64"
                        and ENVIRONMENT != "LOCAL"
                    ):
                        selector = "rocky"
                        suitable_pipeline_worker_names = [
                            worker
                            for worker in pipeline_worker_names
                            if selector in worker
                        ]

                    builders += [
                        buildbot.plugins.util.BuilderConfig(
                            name=pipeline_builder_name,
                            workernames=suitable_pipeline_worker_names,
                            nextWorker=partial(
                                next_worker_code, pipeline_worker_names_gpu
                            ),
                            tags=pipeline_builder_tags,
                            factory=pipeline_build_factory,
                        )
                    ]

                    pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
                    triggerable_scheduler_names += [pipeline_scheduler_name]

                    schedulers += [
                        plugins_schedulers.Triggerable(
                            name=pipeline_scheduler_name,
                            builderNames=[pipeline_builder_name],
                        )
                    ]

            # Create lint builder
            if track_id not in conf.branches.all_lts_tracks:
                print(f"Creating [{track_id}] [{pipeline_type}] [lint] builders")

                pipeline_worker_names = platform_worker_names["code-lint"]
                if pipeline_worker_names:
                    # Only create the builders if the worker exists
                    pipeline_builder_name = f"{track_id}-code-{pipeline_type}-lint"
                    pipeline_builder_tags = pipeline_builder_name.split("-")

                    builders += [
                        buildbot.plugins.util.BuilderConfig(
                            name=pipeline_builder_name,
                            workernames=pipeline_worker_names,
                            tags=pipeline_builder_tags,
                            factory=pipeline_lint_factory,
                        )
                    ]

                    pipeline_scheduler_name = (
                        f"{track_id}-code-{pipeline_type}-lint-triggerable"
                    )
                    triggerable_scheduler_names += [pipeline_scheduler_name]

                    schedulers += [
                        plugins_schedulers.Triggerable(
                            name=pipeline_scheduler_name,
                            builderNames=[pipeline_builder_name],
                        )
                    ]

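            # The coordinator builder runs on a local worker: it resolves the
            # revision, then fans the build out to the per-platform triggerable
            # schedulers created above and waits for them to finish.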
            # Create coordinator.
            if triggerable_scheduler_names:
                trigger_properties = {
                    "python_module": buildbot.plugins.util.Property("python_module"),
                    "needs_full_clean": buildbot.plugins.util.Property(
                        "needs_full_clean"
                    ),
                    "needs_package_delivery": buildbot.plugins.util.Property(
                        "needs_package_delivery"
                    ),
                    "needs_gpu_binaries": buildbot.plugins.util.Property(
                        "needs_gpu_binaries"
                    ),
                    "needs_gpu_tests": buildbot.plugins.util.Property(
                        "needs_gpu_tests"
                    ),
                    "needs_skip_tests": buildbot.plugins.util.Property(
                        "needs_skip_tests"
                    ),
                    "platform_architectures": buildbot.plugins.util.Property(
                        "platform_architectures"
                    ),
                }
                if pipeline_type == "patch":
                    trigger_properties["patch_id"] = buildbot.plugins.util.Property(
                        "patch_id"
                    )
                    trigger_properties["revision"] = buildbot.plugins.util.Property(
                        "revision"
                    )
                    trigger_properties["build_configuration"] = (
                        buildbot.plugins.util.Property("build_configuration")
                    )
                    trigger_factory.addStep(
                        plugins_steps.SetProperties(
                            name="get-revision",
                            properties=gitea.blender.get_patch_revision,
                        )
                    )
                elif pipeline_type == "experimental":
                    trigger_properties["override_branch_id"] = (
                        buildbot.plugins.util.Property("override_branch_id")
                    )
                    trigger_properties["revision"] = buildbot.plugins.util.Property(
                        "revision"
                    )
                    trigger_properties["build_configuration"] = (
                        buildbot.plugins.util.Property("build_configuration")
                    )
                    trigger_factory.addStep(
                        plugins_steps.SetProperties(
                            name="get-revision",
                            properties=gitea.blender.get_branch_revision,
                        )
                    )

                trigger_factory.addStep(
                    PlatformTrigger(
                        schedulerNames=triggerable_scheduler_names,
                        waitForFinish=True,
                        updateSourceStamp=False,
                        set_properties=trigger_properties,
                        description="running",
                        descriptionDone="completed",
                    )
                )

                coordinator_builder_name = (
                    f"{track_id}-code-{pipeline_type}-coordinator"
                )
                builder_tags = coordinator_builder_name.split("-")

                builders += [
                    buildbot.plugins.util.BuilderConfig(
                        name=coordinator_builder_name,
                        workernames=local_worker_names,
                        tags=builder_tags,
                        factory=trigger_factory,
                    )
                ]

                coordinator_scheduler_name = (
                    f"{track_id}-code-{pipeline_type}-coordinator-force"
                )
                schedulers += [
                    plugins_schedulers.ForceScheduler(
                        name=coordinator_scheduler_name,
                        buttonName=f"Trigger {pipeline_type} build",
                        builderNames=[coordinator_builder_name],
                        codebases=[
                            buildbot.plugins.util.CodebaseParameter(
                                codebase="blender.git",
                                project="blender.git",
                                branch=code_tracked_branch_ids[track_id],
                                hide=True,
                            )
                        ],
                        properties=track_properties[track_id]
                        + scheduler_properties[f"code-{pipeline_type}"],
                    )
                ]

                # Daily scheduler.
                if pipeline_type == "daily":
                    print(f"Adding [{pipeline_type}] schedulers")
                    if needs_incremental_schedulers and (track_id in code_track_ids):
                        incremental_scheduler_name = (
                            f"{track_id}-code-{pipeline_type}-coordinator-incremental"
                        )
                        incremental_scheduler_properties = {
                            "revision": "HEAD",
                            "python_module": False,
                            "needs_skip_tests": False,
                            "needs_package_delivery": False,
                            "needs_gpu_binaries": False,
                            "build_configuration": "release",
                            "platform_architectures": code_official_platform_architectures[
                                track_id
                            ],
                        }

                        change_filter = buildbot.plugins.util.ChangeFilter(
                            project=["blender.git"],
                            branch=code_tracked_branch_ids[track_id],
                        )
                        schedulers += [
                            plugins_schedulers.SingleBranchScheduler(
                                name=incremental_scheduler_name,
                                builderNames=[coordinator_builder_name],
                                change_filter=change_filter,
                                properties=incremental_scheduler_properties,
                                treeStableTimer=tree_stable_timer_in_seconds,
                            )
                        ]

                    if needs_nightly_schedulers and (track_id in code_track_ids):
                        nightly_scheduler_name = (
                            f"{track_id}-code-{pipeline_type}-coordinator-nightly"
                        )
                        nightly_properties = {
                            "revision": "HEAD",
                            "python_module": False,
                            "needs_skip_tests": False,
                            "needs_package_delivery": True,
                            "needs_gpu_binaries": True,
                            "build_configuration": "release",
                            "platform_architectures": code_all_platform_architectures[
                                track_id
                            ],
                        }
                        nightly_codebases = {
                            "blender.git": {
                                "repository": "",
                                "branch": code_tracked_branch_ids[track_id],
                                "revision": None,
                            }
                        }
                        schedulers += [
                            plugins_schedulers.Nightly(
                                name=nightly_scheduler_name,
                                builderNames=[coordinator_builder_name],
                                codebases=nightly_codebases,
                                properties=nightly_properties,
                                onlyIfChanged=False,
                                hour=1,
                                minute=30,
                            )
                        ]

    return builders, schedulers
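

# Minimal sketch of how this module is typically wired into a buildbot
# master.cfg (names and module path are illustrative; the actual master
# configuration may differ):
#
#   import pipeline.code
#
#   builders, schedulers = pipeline.code.populate(ENVIRONMENT)
#   c["builders"] += builders
#   c["schedulers"] += schedulers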