Add back further changes from blender-devops

This commit is contained in:
Bart van der Braak 2024-11-19 21:41:39 +01:00
parent 18e653fd2e
commit 0a1454d250
61 changed files with 7917 additions and 1 deletions

101
config/pipeline/__init__.py Normal file
View file

@@ -0,0 +1,101 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import importlib
from buildbot.plugins import changes as plugins_changes
import conf.branches
import pipeline.common
import pipeline.code
import pipeline.code_benchmark
import pipeline.code_deploy
import pipeline.code_bpy_deploy
import pipeline.code_store
import pipeline.doc_api
import pipeline.doc_manual
import pipeline.doc_developer
import pipeline.doc_studio
importlib.reload(pipeline.common)
importlib.reload(conf.branches)
def populate(devops_env_id):
    """Build the builders and schedulers contributed by every pipeline module.

    Each pipeline module is reloaded first so configuration changes are picked
    up without restarting the buildbot master. Returns (builders, schedulers).
    """
    modules = (
        pipeline.code,
        pipeline.code_benchmark,
        pipeline.code_deploy,
        pipeline.code_bpy_deploy,
        pipeline.code_store,
        pipeline.doc_api,
        pipeline.doc_manual,
        pipeline.doc_developer,
        pipeline.doc_studio,
    )
    all_builders = []
    all_schedulers = []
    for module in modules:
        importlib.reload(module)
        module_builders, module_schedulers = module.populate(devops_env_id)
        all_builders.extend(module_builders)
        all_schedulers.extend(module_schedulers)
    return all_builders, all_schedulers
def change_sources():
    """Create the Git pollers for every repository the pipelines track.

    The blender and blender-manual repositories are polled on all tracked code
    branches, while the developer docs and studio tools repositories only
    track "main". Returns a list of GitPoller change sources.

    The original built each poller with a copy-pasted constructor call; the
    per-repository differences are now captured in a spec table.
    """
    branch_ids = list(conf.branches.code_tracked_branch_ids.values())
    poll_interval_in_seconds = 2 * 60

    # (repository path on projects.blender.org, poller workdir, branches).
    poller_specs = [
        ("blender/blender.git", "blender-gitpoller-workdir", branch_ids),
        ("blender/blender-manual.git", "blender-manual-gitpoller-workdir", branch_ids),
        (
            "blender/blender-developer-docs.git",
            "blender-developer-docs-gitpoller-workdir",
            ["main"],
        ),
        (
            "studio/blender-studio-tools.git",
            "blender-studio-tools-gitpoller-workdir",
            ["main"],
        ),
    ]

    return [
        plugins_changes.GitPoller(
            repourl=f"https://projects.blender.org/{repo_path}",
            pollAtLaunch=True,
            pollInterval=poll_interval_in_seconds,
            workdir=workdir,
            # The buildbot "project" is the bare repository name.
            project=repo_path.split("/")[-1],
            branches=branches,
        )
        for repo_path, workdir, branches in poller_specs
    ]

748
config/pipeline/code.py Normal file
View file

@@ -0,0 +1,748 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
from functools import partial
import pathlib
import random
import buildbot.plugins
from buildbot.plugins import steps as plugins_steps
from buildbot.plugins import schedulers as plugins_schedulers
import conf.branches
import conf.worker
import pipeline.common
import gitea.reporter
# Timeouts.
# Per-step timeouts in seconds; a step is killed when it exceeds its timeout.
default_step_timeout_in_seconds = 10 * 60
# TODO: Compile step needs more because of the link on Windows
compile_code_step_timeout_in_seconds = 10 * 60
compile_gpu_step_timeout_in_seconds = 1.5 * 60 * 60
tree_stable_timer_in_seconds = 15 * 60
package_step_timeout_in_seconds = 20 * 60
# Build steps.
# Ordered worker step names executed by the general code pipeline.
code_pipeline_general_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]
code_pipeline_daily_step_names = code_pipeline_general_step_names
# NOTE(review): this list is currently element-for-element identical to the
# general step list above; presumably kept separate so patch builds can
# diverge later — confirm before merging them.
code_pipeline_patch_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]
code_pipeline_experimental_step_names = code_pipeline_general_step_names
# Step list per pipeline type.
pipeline_types_step_names = {
    "daily": code_pipeline_daily_step_names,
    "patch": code_pipeline_patch_step_names,
    "experimental": code_pipeline_experimental_step_names,
}
# Steps for the lint-only pipeline.
code_pipeline_lint_step_names = [
    "configure-machine",
    "update-code",
    "lint-code",
]
# Steps for testing.
code_pipeline_test_step_names = [
    "test-code",
]
# Steps for package delivery.
code_delivery_step_names = [
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
]
# Steps skipped for Python module.
code_python_module_skip_test_names = ["sign-code-binaries"]
# Tracks.
code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
code_track_ids = list(code_tracked_branch_ids.keys())
code_all_platform_architectures = conf.branches.code_all_platform_architectures
code_official_platform_architectures = conf.branches.code_official_platform_architectures
# Pipeline types and force-build form fields per track.
code_track_pipeline_types = {}
track_properties = {}
for track, branch in code_tracked_branch_ids.items():
    # NOTE(review): the "vdev" branch and the fallback branch assign the same
    # value; only "vexp" is effectively special-cased here.
    if track == "vdev":
        code_track_pipeline_types[track] = ["daily"]
    elif track == "vexp":
        code_track_pipeline_types[track] = ["experimental", "patch"]
    else:
        code_track_pipeline_types[track] = ["daily"]
    # Track properties.
    track_properties[track] = [
        buildbot.plugins.util.ChoiceStringParameter(
            name="platform_architectures",
            label="Platforms:",
            required=True,
            choices=code_all_platform_architectures[track],
            multiple=True,
            strict=True,
            default=code_official_platform_architectures[track],
        ),
    ]
# Scheduler properties.
# Form fields shared by every force-build scheduler.
scheduler_properties_common = [
    buildbot.plugins.util.BooleanParameter(
        name="python_module",
        label="Python module -> build bpy module instead of Blender",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_full_clean",
        label="Full clean -> removes build workspace on machine",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_package_delivery",
        label="Package delivery -> push files to configured services",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_binaries",
        label="GPU binaries -> build Cycles GPU kernels",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_tests",
        label="GPU tests -> run EEVEE, Viewport and Cycles GPU tests",
        required=True,
        strict=True,
        default=False,
    ),
]
# code-daily
scheduler_properties_daily = scheduler_properties_common
# code-experimental properties.
scheduler_properties_experimental = [
    buildbot.plugins.util.StringParameter(
        name="override_branch_id",
        label="Branch:",
        required=True,
        size=80,
        regex=r"^[a-zA-Z0-9][A-Za-z0-9\._-]*$",
        default="",
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
]
scheduler_properties_experimental += scheduler_properties_common
# code-patch properties.
scheduler_properties_patch = [
    buildbot.plugins.util.StringParameter(
        name="patch_id", label="Patch Id:", required=True, size=80, default=""
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.StringParameter(
        name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default=""
    ),
]
scheduler_properties_patch += scheduler_properties_common
# Force-scheduler form fields keyed by pipeline kind.
scheduler_properties = {
    "code-daily": scheduler_properties_daily,
    "code-experimental": scheduler_properties_experimental,
    "code-patch": scheduler_properties_patch,
}
@buildbot.plugins.util.renderer
def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_type, step_name):
    """Render the worker command line for a single code pipeline step.

    Reads build properties (revision, patch/branch overrides, feature
    toggles) and translates them into command-line arguments for the
    worker-side code.py script.
    """
    commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
    patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
    override_branch_id = pipeline.common.fetch_property(props, key="override_branch_id", default="")
    python_module = pipeline.common.fetch_property(props, key="python_module", default=False)
    needs_gpu_tests = pipeline.common.fetch_property(props, key="needs_gpu_tests", default=False)
    needs_gpu_binaries = pipeline.common.fetch_property(
        props, key="needs_gpu_binaries", default=False
    )
    build_configuration = pipeline.common.fetch_property(
        props, key="build_configuration", default="release"
    )
    # Checkbox properties may arrive as the string "true" instead of a bool,
    # so normalize both representations.
    needs_full_clean = pipeline.common.fetch_property(
        props, key="needs_full_clean", default="false"
    )
    needs_full_clean = needs_full_clean in ["true", True]
    needs_package_delivery = pipeline.common.fetch_property(
        props, key="needs_package_delivery", default="false"
    )
    needs_package_delivery = needs_package_delivery in ["true", True]
    # Auto enable asserts when not using package delivery. Only support in 4.1+.
    # BUGFIX: the original wrote `not in ("v360")`, which is a substring test
    # against the *string* "v360" (parentheses without a comma are not a
    # tuple); use a one-element tuple for real membership testing.
    if track_id not in ("v360",):
        if build_configuration == "release" and not needs_package_delivery:
            build_configuration = "asserts"
    platform_id, architecture = pipeline.common.fetch_platform_architecture(props)
    args = []
    if architecture:
        args += ["--architecture", architecture]
    if pipeline_type == "patch":
        # Powershell doesn't like # in string argument so strip it.
        args += ["--patch-id", patch_id.lstrip("#")]
    elif pipeline_type == "experimental":
        args += ["--branch-id", override_branch_id]
    args += ["--commit-id", commit_id]
    args += ["--build-configuration", build_configuration]
    if python_module:
        args += ["--python-module"]
    if needs_full_clean:
        args += ["--needs-full-clean"]
    # GPU kernels are only relevant for these steps.
    if step_name in ["compile-gpu", "compile-install", "test-code"]:
        if needs_package_delivery or needs_gpu_binaries:
            args += ["--needs-gpu-binaries"]
        if needs_gpu_tests:
            args += ["--needs-gpu-tests"]
    args += [step_name]
    return pipeline.common.create_worker_command("code.py", devops_env_id, track_id, args)
def needs_do_code_pipeline_step(step):
    """Decide whether a code pipeline step should run for this build.

    Test steps are skipped when needs_skip_tests is set, GPU compilation
    runs only when GPU binaries or package delivery are requested, and
    delivery/master steps run only for package delivery builds. Signing is
    skipped for Python module (bpy) builds.

    CLEANUP: removed unused locals from the original (`build`, `worker`,
    `worker_name`, `worker_system`) — their values were never read.
    """
    # Use this to test master steps only, otherwise we be waiting for 30 minutes
    needs_master_steps_only = False
    if needs_master_steps_only:
        return step.name in pipeline.common.code_pipeline_master_step_names

    is_package_delivery_step = (step.name in code_delivery_step_names) or (
        step.name in pipeline.common.code_pipeline_master_step_names
    )
    needs_package_delivery = step.getProperty("needs_package_delivery")
    needs_gpu_binaries = step.getProperty("needs_gpu_binaries")
    needs_skip_tests = step.getProperty("needs_skip_tests")
    python_module = step.getProperty("python_module")

    needs_do_it = True
    if step.name in code_pipeline_test_step_names:
        needs_do_it = not needs_skip_tests
    elif step.name == "compile-gpu":
        needs_do_it = needs_package_delivery or needs_gpu_binaries
    elif is_package_delivery_step:
        needs_do_it = needs_package_delivery
    # bpy module builds skip the signing step entirely.
    if python_module and (step.name in code_python_module_skip_test_names):
        needs_do_it = False
    return needs_do_it
# Custom file upload that shows links to download files.
class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload):
    """MultipleFileUpload that adds a per-file download URL to the step."""

    def uploadDone(self, result, source, masterdest):
        # Without a base URL there is nothing to link to.
        if not self.url:
            return
        name = pathlib.Path(source).name
        if name.endswith(".zip"):
            # Zip archives link directly to the uploaded file.
            self.addURL(name, self.url + "/" + name)
        else:
            # Anything else is treated as a directory holding an HTML report.
            self.addURL(name, self.url + "/" + name + "/report.html")

    def allUploadsDone(self, result, sources, masterdest):
        # Per-file links were added in uploadDone; suppress the default summary.
        return
def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
    """Create the step that uploads packaged binaries to the master."""
    # 500 MB cap per uploaded file (the original local was misleadingly
    # named file_size_in_mb; the value is in bytes).
    max_upload_size_in_bytes = 500 * 1024 * 1024
    source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
    dest_path = pathlib.Path(
        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
    ).expanduser()
    return plugins_steps.MultipleFileUpload(
        name="deliver-code-binaries",
        maxsize=max_upload_size_in_bytes,
        workdir=str(source_path),
        glob=True,
        workersrcs=["*.*"],
        masterdest=str(dest_path),
        mode=0o644,
        url=None,
        description="running",
        descriptionDone="completed",
        doStepIf=needs_do_code_pipeline_step,
    )
def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
    """Create the step that uploads test results and links their reports."""
    max_upload_size_in_bytes = 500 * 1024 * 1024  # 500 MB per file.
    package_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
    download_path = pathlib.Path(
        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
    ).expanduser()

    worker_tests_path = package_path / "tests"
    master_tests_path = download_path / "tests"

    sources = ["tests-*.zip"]
    branch_id = code_tracked_branch_ids[track_id]
    if branch_id:
        # Report directories are named after the short branch id.
        short_branch_id = branch_id.replace("blender-", "").replace("-release", "")
        sources.append(short_branch_id + "-*")

    return LinkMultipleFileUpload(
        name="deliver-test-results",
        maxsize=max_upload_size_in_bytes,
        workdir=str(worker_tests_path),
        glob=True,
        workersrcs=sources,
        masterdest=str(master_tests_path),
        mode=0o644,
        url=f"../download/{pipeline_type}/tests",
        description="running",
        descriptionDone="completed",
        alwaysRun=True,
    )
def next_worker_code(worker_names_gpu, builder, workers, request):
    """Pick a worker for a code build, honouring GPU test requirements.

    GPU workers are used when the build requests GPU tests and this
    platform has GPU workers; NVIDIA machines are otherwise excluded,
    as they are reserved for GPU builds only.
    """
    wants_gpu = request.properties.getProperty("needs_gpu_tests", False)
    if wants_gpu and worker_names_gpu:
        candidates = [w for w in workers if w.worker.workername in worker_names_gpu]
    else:
        candidates = [w for w in workers if "nvidia" not in w.worker.workername]
    return random.choice(candidates) if candidates else None
class PlatformTrigger(plugins_steps.Trigger):
    """Trigger that only fires schedulers matching the selected platforms.

    Lint schedulers always match; platform schedulers match when their
    name contains one of the requested platform-architecture ids.
    """

    def getSchedulersAndProperties(self):
        platform_architectures = self.set_properties["platform_architectures"]
        selected = []
        for scheduler_name in self.schedulerNames:
            matches = "lint" in scheduler_name or any(
                platform in scheduler_name for platform in platform_architectures
            )
            if matches:
                selected.append(
                    {
                        "sched_name": scheduler_name,
                        "props_to_set": self.set_properties,
                        "unimportant": False,
                    }
                )
        return selected
def populate(devops_env_id):
    """Create all builders and schedulers for the code pipelines.

    For every tracked branch (track) and each of its pipeline types
    (daily / patch / experimental) this creates per-platform build
    builders, an optional lint builder, and a coordinator builder that
    triggers the per-platform builds. Returns (builders, schedulers).
    """
    builders = []
    schedulers = []
    # NOTE(review): conf.machines is not imported by this module directly;
    # presumably it is loaded via the other conf.* imports — confirm.
    platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id)
    local_worker_names = conf.machines.fetch_local_worker_names()
    worker_config = conf.worker.get_config(devops_env_id)
    # Automatic (incremental/nightly) schedulers only exist in production.
    needs_incremental_schedulers = devops_env_id in ["PROD"]
    needs_nightly_schedulers = devops_env_id in ["PROD"]
    print("*** Creating [code] pipeline")
    for track_id in code_track_ids:
        pipeline_types = code_track_pipeline_types[track_id]
        for pipeline_type in pipeline_types:
            # Create steps.
            step_names = pipeline_types_step_names[pipeline_type]
            pipeline_build_factory = buildbot.plugins.util.BuildFactory()
            print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps")
            for step_name in step_names:
                if step_name == "deliver-code-binaries":
                    step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type)
                elif step_name == "deliver-test-results":
                    step = create_deliver_test_results_step(worker_config, track_id, pipeline_type)
                else:
                    # Experimental builds may fail tests without halting the build.
                    needs_halt_on_failure = True
                    if step_name in code_pipeline_test_step_names:
                        needs_halt_on_failure = track_id != "vexp"
                    # Compile steps get longer timeouts than the default.
                    step_timeout_in_seconds = default_step_timeout_in_seconds
                    if step_name == "compile-code":
                        step_timeout_in_seconds = compile_code_step_timeout_in_seconds
                    elif step_name == "compile-gpu":
                        step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds
                    step_command = create_code_worker_command_args.withArgs(
                        devops_env_id, track_id, pipeline_type, step_name
                    )
                    step = buildbot.plugins.steps.ShellCommand(
                        name=step_name,
                        logEnviron=True,
                        haltOnFailure=needs_halt_on_failure,
                        timeout=step_timeout_in_seconds,
                        description="running",
                        descriptionDone="completed",
                        doStepIf=needs_do_code_pipeline_step,
                        command=step_command,
                    )
                pipeline_build_factory.addStep(step)
            for master_step_name in pipeline.common.code_pipeline_master_step_names:
                master_step_command = pipeline.common.create_master_command_args.withArgs(
                    devops_env_id, track_id, pipeline_type, master_step_name, single_platform=True
                )
                # Master to archive and purge builds
                pipeline_build_factory.addStep(
                    plugins_steps.MasterShellCommand(
                        name=master_step_name,
                        logEnviron=False,
                        command=master_step_command,
                        description="running",
                        descriptionDone="completed",
                        doStepIf=needs_do_code_pipeline_step,
                    )
                )
            # Create lint pipeline
            pipeline_lint_factory = buildbot.plugins.util.BuildFactory()
            for step_name in code_pipeline_lint_step_names:
                step_command = create_code_worker_command_args.withArgs(
                    devops_env_id, track_id, pipeline_type, step_name
                )
                pipeline_lint_factory.addStep(
                    buildbot.plugins.steps.ShellCommand(
                        name=step_name,
                        logEnviron=True,
                        haltOnFailure=True,
                        timeout=default_step_timeout_in_seconds,
                        description="running",
                        descriptionDone="completed",
                        command=step_command,
                    )
                )
            triggerable_scheduler_names = []
            trigger_factory = buildbot.plugins.util.BuildFactory()
            # Create builders.
            for platform_architecture in code_all_platform_architectures[track_id]:
                print(f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders")
                worker_group_id = f"{platform_architecture}-code"
                worker_group_id_gpu = f"{platform_architecture}-code-gpu"
                pipeline_worker_names = platform_worker_names[worker_group_id]
                pipeline_worker_names_gpu = platform_worker_names[worker_group_id_gpu]
                if pipeline_worker_names:
                    # Only create the builders if the worker exists
                    pipeline_builder_name = (
                        f"{track_id}-code-{pipeline_type}-{platform_architecture}"
                    )
                    pipeline_builder_tags = pipeline_builder_name.split("-")
                    # Assigning different workers for different tracks, specifically Linux builders.
                    suitable_pipeline_worker_names = pipeline_worker_names
                    if platform_architecture == "linux-x86_64" and devops_env_id != "LOCAL":
                        selector = "rocky"
                        suitable_pipeline_worker_names = [
                            worker for worker in pipeline_worker_names if selector in worker
                        ]
                    builders += [
                        buildbot.plugins.util.BuilderConfig(
                            name=pipeline_builder_name,
                            workernames=suitable_pipeline_worker_names,
                            nextWorker=partial(next_worker_code, pipeline_worker_names_gpu),
                            tags=pipeline_builder_tags,
                            factory=pipeline_build_factory,
                        )
                    ]
                    pipeline_scheduler_name = (
                        f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
                    )
                    triggerable_scheduler_names += [pipeline_scheduler_name]
                    schedulers += [
                        plugins_schedulers.Triggerable(
                            name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
                        )
                    ]
            # Create lint builder
            if track_id not in conf.branches.all_lts_tracks:
                print(f"Creating [{track_id}] [{pipeline_type}] [lint] builders")
                pipeline_worker_names = platform_worker_names["code-lint"]
                if pipeline_worker_names:
                    # Only create the builders if the worker exists
                    pipeline_builder_name = f"{track_id}-code-{pipeline_type}-lint"
                    pipeline_builder_tags = pipeline_builder_name.split("-")
                    builders += [
                        buildbot.plugins.util.BuilderConfig(
                            name=pipeline_builder_name,
                            workernames=pipeline_worker_names,
                            tags=pipeline_builder_tags,
                            factory=pipeline_lint_factory,
                        )
                    ]
                    pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-lint-triggerable"
                    triggerable_scheduler_names += [pipeline_scheduler_name]
                    schedulers += [
                        plugins_schedulers.Triggerable(
                            name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
                        )
                    ]
            # Create coordinator.
            if triggerable_scheduler_names:
                # Properties forwarded from the coordinator to triggered builds.
                trigger_properties = {
                    "python_module": buildbot.plugins.util.Property("python_module"),
                    "needs_full_clean": buildbot.plugins.util.Property("needs_full_clean"),
                    "needs_package_delivery": buildbot.plugins.util.Property(
                        "needs_package_delivery"
                    ),
                    "needs_gpu_binaries": buildbot.plugins.util.Property("needs_gpu_binaries"),
                    "needs_gpu_tests": buildbot.plugins.util.Property("needs_gpu_tests"),
                    "needs_skip_tests": buildbot.plugins.util.Property("needs_skip_tests"),
                    "platform_architectures": buildbot.plugins.util.Property(
                        "platform_architectures"
                    ),
                }
                if pipeline_type == "patch":
                    trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id")
                    trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
                    trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
                        "build_configuration"
                    )
                    # NOTE(review): gitea.blender is not imported by this module
                    # (only gitea.reporter is); confirm it is loaded elsewhere.
                    trigger_factory.addStep(
                        plugins_steps.SetProperties(
                            name="get-revision", properties=gitea.blender.get_patch_revision
                        )
                    )
                elif pipeline_type == "experimental":
                    trigger_properties["override_branch_id"] = buildbot.plugins.util.Property(
                        "override_branch_id"
                    )
                    trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
                    trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
                        "build_configuration"
                    )
                    trigger_factory.addStep(
                        plugins_steps.SetProperties(
                            name="get-revision", properties=gitea.blender.get_branch_revision
                        )
                    )
                trigger_factory.addStep(
                    PlatformTrigger(
                        schedulerNames=triggerable_scheduler_names,
                        waitForFinish=True,
                        updateSourceStamp=False,
                        set_properties=trigger_properties,
                        description="running",
                        descriptionDone="completed",
                    )
                )
                coordinator_builder_name = f"{track_id}-code-{pipeline_type}-coordinator"
                builder_tags = coordinator_builder_name.split("-")
                builders += [
                    buildbot.plugins.util.BuilderConfig(
                        name=coordinator_builder_name,
                        workernames=local_worker_names,
                        tags=builder_tags,
                        factory=trigger_factory,
                    )
                ]
                coordinator_scheduler_name = f"{track_id}-code-{pipeline_type}-coordinator-force"
                schedulers += [
                    plugins_schedulers.ForceScheduler(
                        name=coordinator_scheduler_name,
                        buttonName=f"Trigger {pipeline_type} build",
                        builderNames=[coordinator_builder_name],
                        codebases=[
                            buildbot.plugins.util.CodebaseParameter(
                                codebase="blender.git",
                                project="blender.git",
                                branch=code_tracked_branch_ids[track_id],
                                hide=True,
                            )
                        ],
                        properties=track_properties[track_id]
                        + scheduler_properties[f"code-{pipeline_type}"],
                    )
                ]
                # Daily scheduler.
                if pipeline_type == "daily":
                    print(f"Adding [{pipeline_type}] schedulers")
                    if needs_incremental_schedulers and (track_id in code_track_ids):
                        incremental_scheduler_name = (
                            f"{track_id}-code-{pipeline_type}-coordinator-incremental"
                        )
                        # Incremental builds: no delivery, official platforms only.
                        incremental_scheduler_properties = {
                            "revision": "HEAD",
                            "python_module": False,
                            "needs_skip_tests": False,
                            "needs_package_delivery": False,
                            "needs_gpu_binaries": False,
                            "build_configuration": "release",
                            "platform_architectures": code_official_platform_architectures[
                                track_id
                            ],
                        }
                        change_filter = buildbot.plugins.util.ChangeFilter(
                            project=["blender.git"], branch=code_tracked_branch_ids[track_id]
                        )
                        schedulers += [
                            plugins_schedulers.SingleBranchScheduler(
                                name=incremental_scheduler_name,
                                builderNames=[coordinator_builder_name],
                                change_filter=change_filter,
                                properties=incremental_scheduler_properties,
                                treeStableTimer=tree_stable_timer_in_seconds,
                            )
                        ]
                    if needs_nightly_schedulers and (track_id in code_track_ids):
                        nightly_scheduler_name = (
                            f"{track_id}-code-{pipeline_type}-coordinator-nightly"
                        )
                        # Nightly builds: full delivery with GPU binaries on all platforms.
                        nightly_properties = {
                            "revision": "HEAD",
                            "python_module": False,
                            "needs_skip_tests": False,
                            "needs_package_delivery": True,
                            "needs_gpu_binaries": True,
                            "build_configuration": "release",
                            "platform_architectures": code_all_platform_architectures[track_id],
                        }
                        nightly_codebases = {
                            "blender.git": {
                                "repository": "",
                                "branch": code_tracked_branch_ids[track_id],
                                "revision": None,
                            }
                        }
                        schedulers += [
                            plugins_schedulers.Nightly(
                                name=nightly_scheduler_name,
                                builderNames=[coordinator_builder_name],
                                codebases=nightly_codebases,
                                properties=nightly_properties,
                                onlyIfChanged=False,
                                hour=1,
                                minute=30,
                            )
                        ]
    return builders, schedulers

View file

@@ -0,0 +1,94 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import pathlib
from functools import partial
import buildbot.plugins
from buildbot.plugins import steps as plugins_steps
import conf.branches
import conf.worker
import pipeline.common
# Custom file upload that shows links to download files.
class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload):
    """MultipleFileUpload that links each uploaded result to its HTML report."""

    def uploadDone(self, result, source, masterdest):
        # Without a base URL there is nothing to link to.
        if not self.url:
            return
        name = pathlib.Path(source).name
        # Each uploaded directory is expected to contain a report.html.
        self.addURL(name, self.url + "/" + name + "/report.html")

    def allUploadsDone(self, result, sources, masterdest):
        # Per-file links were added in uploadDone; suppress the default summary.
        return
def create_deliver_step(devops_env_id):
    """Create the upload step that publishes benchmark results."""
    worker_config = conf.worker.get_config(devops_env_id)
    max_upload_size_in_bytes = 500 * 1024 * 1024  # 500 MB per file.
    source_path = pathlib.Path("../../../../git/blender-vdev/build_package")
    dest_path = worker_config.buildbot_download_folder / "daily" / "benchmarks"
    return LinkMultipleFileUpload(
        name="deliver",
        maxsize=max_upload_size_in_bytes,
        workdir=str(source_path),
        glob=True,
        workersrcs=["main-*"],
        masterdest=str(dest_path),
        mode=0o644,
        url="../download/daily/benchmarks",
        description="running",
        descriptionDone="completed",
        alwaysRun=True,
    )
def populate(devops_env_id):
    """Create builders and schedulers for the code benchmark pipeline."""
    form_properties = [
        buildbot.plugins.util.StringParameter(
            name="commit_id",
            label="Commit:",
            required=True,
            size=80,
            default="HEAD",
        ),
        buildbot.plugins.util.BooleanParameter(
            name="needs_gpu_binaries",
            label="GPU binaries -> build Cycles GPU kernels",
            required=True,
            strict=True,
            default=True,
            hide=True,
        ),
    ]
    # Benchmarks run only on the main branch with GPU-capable workers.
    step_names = [
        "configure-machine",
        "update-code",
        "compile-code",
        "compile-gpu",
        "compile-install",
        "benchmark",
        partial(create_deliver_step, devops_env_id),
        "clean",
    ]
    return pipeline.common.create_pipeline(
        devops_env_id,
        "code-benchmark",
        "code_benchmark.py",
        step_names,
        {"vdev": "main"},
        form_properties,
        "blender.git",
        ["linux-x86_64-code-gpu", "darwin-arm64-code-gpu"],
        # Compile GPU step needs a long timeout.
        default_step_timeout_in_seconds=90 * 60,
        variations=["linux", "darwin"],
        nightly_properties={"commit_id": "HEAD", "needs_gpu_binaries": True},
        hour=7,
        minute=30,
    )

View file

@@ -0,0 +1,30 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
# Builders for deploying Python module releases to PyPI.
import conf.branches
import pipeline.common
def populate(devops_env_id):
    """Create builders and schedulers for deploying bpy releases to PyPI."""
    # This pipeline needs no extra force-build form fields.
    form_properties = []
    step_names = [
        "configure-machine",
        "update-code",
        "pull",
        "deliver-pypi",
        "clean",
    ]
    return pipeline.common.create_pipeline(
        devops_env_id,
        "code-bpy-deploy",
        "code_bpy_deploy.py",
        step_names,
        conf.branches.code_deploy_track_ids,
        form_properties,
        "blender.git",
        ["linux-x86_64-general"],
    )

View file

@@ -0,0 +1,43 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
# Builders for deploying Blender releases.
import buildbot.plugins
import conf.branches
import pipeline.common
def populate(devops_env_id):
    """Create builders and schedulers for deploying Blender releases."""
    form_properties = [
        buildbot.plugins.util.BooleanParameter(
            name="needs_full_clean",
            label="Full clean -> removes build workspace on machine",
            required=True,
            strict=True,
            default=False,
        ),
    ]
    step_names = [
        "configure-machine",
        "update-code",
        "package-source",
        "pull-artifacts",
        "repackage-artifacts",
        "deploy-artifacts",
        "monitor-artifacts",
        "clean",
    ]
    return pipeline.common.create_pipeline(
        devops_env_id,
        "code-artifacts-deploy",
        "code_deploy.py",
        step_names,
        conf.branches.code_deploy_track_ids,
        form_properties,
        "blender.git",
        ["linux-x86_64-general"],
        default_step_timeout_in_seconds=30 * 60,
    )

View file

@@ -0,0 +1,235 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
# Builders for releasing Blender to stores.
import pathlib
import buildbot.plugins
from buildbot.plugins import steps as plugins_steps
from buildbot.plugins import schedulers as plugins_schedulers
import conf.branches
import conf.worker
import pipeline.common
# Timeouts.
# Store packaging/delivery steps get a generous one-hour timeout.
default_step_timeout_in_seconds = 60 * 60
# Tracks.
track_ids = conf.branches.code_store_track_ids
# Branch per store track, taken from the global track -> branch mapping.
tracked_branch_ids = {}
for track_id in track_ids:
    tracked_branch_ids[track_id] = conf.branches.code_tracked_branch_ids[track_id]
# Properties.
# Force-build form: which store(s) to release to.
scheduler_properties = [
    buildbot.plugins.util.ChoiceStringParameter(
        name="store_id",
        label="Store:",
        required=True,
        choices=["snap", "steam", "windows"],
        multiple=True,
        strict=True,
        default=["snap", "steam", "windows"],
    ),
]
def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type):
    """Create the step uploading .msix packages to download.blender.org."""
    max_upload_size_in_bytes = 500 * 1024 * 1024  # 500 MB per file.
    source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
    dest_path = pathlib.Path(
        f"{worker_config.buildbot_download_folder}/{pipeline_type}"
    ).expanduser()
    return plugins_steps.MultipleFileUpload(
        name="deliver-binaries",
        maxsize=max_upload_size_in_bytes,
        workdir=str(source_path),
        glob=True,
        workersrcs=["*.msix*"],
        masterdest=str(dest_path),
        mode=0o644,
        url=None,
        description="running",
        descriptionDone="completed",
    )
def populate(devops_env_id):
    """Create builders and schedulers for the store release pipelines.

    For each store track this creates one builder per store (steam, snap,
    windows) plus a coordinator builder that triggers them, with a nightly
    scheduler in production. Returns (builders, schedulers).
    """
    builders = []
    schedulers = []
    # NOTE(review): conf.machines is not imported by this module directly;
    # presumably it is loaded via the other conf.* imports — confirm.
    platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id)
    local_worker_names = conf.machines.fetch_local_worker_names()
    worker_config = conf.worker.get_config(devops_env_id)
    needs_nightly_schedulers = devops_env_id == "PROD"
    # Store builds always repackage the daily pipeline output.
    pipeline_type = "daily"
    store_ids = ["steam", "snap", "windows"]
    print("*** Creating [code] [store] pipeline")
    for track_id in track_ids:
        triggerable_scheduler_names = []
        trigger_factory = buildbot.plugins.util.BuildFactory()
        for store_id in store_ids:
            # Create build steps.
            pipeline_build_factory = buildbot.plugins.util.BuildFactory()
            step_names = [
                "configure-machine",
                "update-code",
                "pull-artifacts",
                "package",
            ]
            # Windows uploads the msix to the master; other stores deliver
            # directly to the store service from the worker.
            if store_id == "windows":
                step_names += ["deliver-binaries"]
            else:
                step_names += ["deliver"]
            step_names += ["clean"]
            print(f"Creating [{track_id}] [code] [store] [{store_id}] pipeline steps")
            for step_name in step_names:
                if step_name == "deliver-binaries":
                    step = create_deliver_binaries_windows_step(
                        worker_config, track_id, pipeline_type
                    )
                else:
                    args = ["--store-id", store_id, step_name]
                    step_command = pipeline.common.create_worker_command(
                        "code_store.py", devops_env_id, track_id, args
                    )
                    step = plugins_steps.ShellCommand(
                        name=step_name,
                        logEnviron=True,
                        haltOnFailure=True,
                        timeout=default_step_timeout_in_seconds,
                        description="running",
                        descriptionDone="completed",
                        command=step_command,
                    )
                pipeline_build_factory.addStep(step)
            for master_step_name in pipeline.common.code_pipeline_master_step_names:
                master_step_command = pipeline.common.create_master_command_args.withArgs(
                    devops_env_id, track_id, pipeline_type, master_step_name, single_platform=False
                )
                # Master to archive and purge builds
                pipeline_build_factory.addStep(
                    plugins_steps.MasterShellCommand(
                        name=master_step_name,
                        logEnviron=False,
                        command=master_step_command,
                        description="running",
                        descriptionDone="completed",
                    )
                )
            # Create builders.
            worker_group_id = (
                f"windows-amd64-store-{store_id}"
                if store_id == "windows"
                else f"linux-x86_64-store-{store_id}"
            )
            pipeline_worker_names = platform_worker_names[worker_group_id]
            if pipeline_worker_names:
                pipeline_builder_name = f"{track_id}-code-store-{store_id}"
                builder_tags = pipeline_builder_name.split("-")
                builders += [
                    buildbot.plugins.util.BuilderConfig(
                        name=pipeline_builder_name,
                        workernames=pipeline_worker_names,
                        tags=builder_tags,
                        factory=pipeline_build_factory,
                    )
                ]
                scheduler_name = f"{track_id}-code-store-{store_id}-triggerable"
                triggerable_scheduler_names += [scheduler_name]
                schedulers += [
                    plugins_schedulers.Triggerable(
                        name=scheduler_name, builderNames=[pipeline_builder_name]
                    )
                ]
        # Create coordinator.
        if triggerable_scheduler_names:
            trigger_properties = {}
            trigger_factory.addStep(
                plugins_steps.Trigger(
                    schedulerNames=triggerable_scheduler_names,
                    waitForFinish=True,
                    updateSourceStamp=False,
                    set_properties=trigger_properties,
                    description="running",
                    descriptionDone="completed",
                )
            )
            coordinator_builder_name = f"{track_id}-code-store-coordinator"
            builder_tags = coordinator_builder_name.split("-")
            builders += [
                buildbot.plugins.util.BuilderConfig(
                    name=coordinator_builder_name,
                    workernames=local_worker_names,
                    tags=builder_tags,
                    factory=trigger_factory,
                )
            ]
            coordinator_scheduler_name = f"{track_id}-code-store-coordinator-force"
            schedulers += [
                plugins_schedulers.ForceScheduler(
                    name=coordinator_scheduler_name,
                    buttonName="Trigger store build",
                    builderNames=[coordinator_builder_name],
                    codebases=[
                        buildbot.plugins.util.CodebaseParameter(
                            codebase="", revision=None, hide=True
                        )
                    ],
                    properties=scheduler_properties,
                )
            ]
            if needs_nightly_schedulers and (track_id in track_ids):
                nightly_scheduler_name = f"{track_id}-code-store-coordinator-nightly"
                nightly_properties = {
                    "revision": "HEAD",
                }
                nightly_codebases = {
                    "blender.git": {
                        "repository": "",
                        "branch": tracked_branch_ids[track_id],
                        "revision": None,
                    }
                }
                schedulers += [
                    plugins_schedulers.Nightly(
                        name=nightly_scheduler_name,
                        builderNames=[coordinator_builder_name],
                        codebases=nightly_codebases,
                        properties=nightly_properties,
                        onlyIfChanged=False,
                        hour=5,
                        minute=30,
                    )
                ]
    return builders, schedulers

335
config/pipeline/common.py Normal file
View file

@ -0,0 +1,335 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import buildbot.plugins
from buildbot.plugins import steps as plugins_steps
from buildbot.plugins import schedulers as plugins_schedulers
import conf.machines
# Root path of the devops git checkouts.
# NOTE(review): not referenced in this module — presumably used by importing
# modules; confirm before removing.
devops_git_root_path = "~/git"
# Steps that run on the buildbot master.
code_pipeline_master_step_names = [
    "deduplicate-binaries",
    "purge-binaries",
]
def fetch_property(props, key, default=None):
    """Return the value stored under ``key`` in ``props``, or ``default`` when absent."""
    if key not in props:
        return default
    return props[key]
def fetch_platform_architecture(props):
    """Derive the (platform_id, architecture) pair this builder covers.

    The builder name is expected to end in "<platform>-<architecture>"
    (e.g. "vdev-code-linux-x86_64"); that suffix is matched against the
    entries of the "platform_architectures" build property.

    Returns:
        Tuple split from the matching "platform-architecture" entry, or
        (None, None) when no entry matches the builder name.
    """
    platform_architectures = fetch_property(props, key="platform_architectures")

    # Find the platform arch for this builder; the last two dash-separated
    # components of the builder name identify platform and architecture.
    buildername = fetch_property(props, key="buildername")
    builder_platform_architecture = "-".join(buildername.split("-")[-2:])

    found_platform_architecture = None
    if platform_architectures:
        for platform_architecture in platform_architectures:
            if platform_architecture in builder_platform_architecture:
                found_platform_architecture = platform_architecture
                break

    if found_platform_architecture:
        # Return a tuple for consistency with the fallback branch below
        # (previously this branch returned a list, the other a tuple).
        return tuple(found_platform_architecture.split("-"))
    else:
        return None, None
def always_do_step(step):
    """``doStepIf`` predicate that unconditionally runs the step."""
    return True
def needs_do_doc_pipeline_step(step):
    """``doStepIf`` predicate for doc pipelines.

    Package and deliver steps run only when the "needs_package_delivery"
    property is set; every other step always runs.
    """
    is_delivery_step = ("package" in step.name) or ("deliver" in step.name)
    if not is_delivery_step:
        return True
    return step.getProperty("needs_package_delivery")
def create_worker_command(script, devops_env_id, track_id, args):
    """Build the command line that runs a worker-side devops script.

    The relative path assumes the working directory is
    ~/.devops/services/buildbot-worker/<builder-name>/build; there appears
    to be no way to expand a tilde here.

    This is assumed to run within the buildbot worker pipenv, so the
    "python" command should match that environment's version and packages.
    """
    worker_script = f"../../../../../git/blender-devops/buildbot/worker/{script}"
    command = [
        "python",
        worker_script,
        "--track-id",
        track_id,
        "--service-env-id",
        devops_env_id,
    ]
    command.extend(args)
    return command
@buildbot.plugins.util.renderer
def create_master_command_args(
    props, devops_env_id, track_id, pipeline_type, step_name, single_platform
):
    """Render the command line for a step that runs on the buildbot master.

    Args:
        props: Build properties of the triggering build.
        devops_env_id: Service environment identifier (e.g. "PROD").
        track_id: Branch track the build belongs to.
        pipeline_type: Value forwarded as --pipeline-type to archive.py.
        step_name: Positional step name passed to archive.py.
        single_platform: When true, restrict archiving to the platform and
            architecture encoded in this builder's name.
    """
    build_configuration = fetch_property(props, key="build_configuration", default="release")
    python_module = fetch_property(props, key="python_module", default=False)

    args = [
        "--pipeline-type",
        pipeline_type,
        "--build-configuration",
        build_configuration,
    ]

    if single_platform:
        # Archive binaries for a single architecture only?
        platform_id, architecture = fetch_platform_architecture(props)
        args += ["--platform-id", platform_id, "--architecture", architecture]

    if python_module:
        args += ["--python-module"]

    args += [step_name]

    # This relative path assumes we are in:
    # ~/.devops/services/buildbot-master
    # There appears to be no way to expand a tilde here?
    #
    # This is assumed to run within the buildbot master pipenv,
    # so the python command should match the python version and
    # available packages.
    cmd = [
        "python",
        "../../../git/blender-devops/buildbot/worker/archive.py",
        "--track-id",
        track_id,
        "--service-env-id",
        devops_env_id,
    ]

    return cmd + list(args)
@buildbot.plugins.util.renderer
def create_pipeline_worker_command(
    props,
    devops_env_id,
    track_id,
    script,
    step_name,
    variation_property,
    variation,
    builder_properties,
):
    """Render the worker command line for one pipeline step.

    Translates the builder's configured parameters into command-line
    arguments for the worker-side script: boolean parameters become bare
    flags (emitted only when enabled), every other parameter becomes a
    "--name value" pair, and a known revision is forwarded as --commit-id.
    """
    args = [step_name]

    if variation_property:
        args.append("--" + variation_property.replace("_", "-"))
        args.append(variation)

    for builder_prop in builder_properties:
        prop_name = builder_prop.name
        prop_value = props[prop_name] if prop_name in props else builder_prop.default
        argument_name = "--" + prop_name.replace("_", "-")
        if isinstance(builder_prop, buildbot.plugins.util.BooleanParameter):
            # Boolean parameters are presence-only flags.
            if prop_value in ("true", True):
                args.append(argument_name)
        else:
            args.append(argument_name)
            args.append(prop_value)

    revision = props["revision"] if "revision" in props else None
    if revision:
        args += ["--commit-id", revision]

    return create_worker_command(script, devops_env_id, track_id, args)
def create_pipeline(
    devops_env_id,
    artifact_id,
    script,
    steps,
    tracked_branch_ids,
    properties,
    codebase,
    worker_group_ids,
    variation_property=None,
    variations=("",),  # Immutable default avoids the shared-mutable-default pitfall.
    incremental_properties=None,
    nightly_properties=None,
    do_step_if=always_do_step,
    default_step_timeout_in_seconds=600,
    tree_stable_timer_in_seconds=180,
    hour=5,
    minute=0,
):
    """Create builders and schedulers for one artifact pipeline.

    For every track in ``tracked_branch_ids`` this creates, per variation,
    a worker builder running ``steps`` through ``script``, plus a single
    coordinator builder that triggers all variation builders. Incremental
    (on-commit) and nightly schedulers are added only when their property
    sets are given and the environment is production.

    Args:
        devops_env_id: Service environment identifier (e.g. "PROD", "LOCAL").
        artifact_id: Artifact name used in builder/scheduler names.
        script: Worker-side script that implements the steps.
        steps: Step names (strings), or callables returning a buildbot step.
        tracked_branch_ids: Mapping of track id -> branch name.
        properties: ForceScheduler parameters exposed on the coordinator.
        codebase: Project/codebase name used for change filtering.
        worker_group_ids: One worker group per entry in ``variations``.
        variation_property: Property name carrying the variation, if any.
        variations: Variation values; the empty string means "no variation".
        incremental_properties: Properties for the on-commit scheduler, or None.
        nightly_properties: Properties for the nightly scheduler, or None.
        do_step_if: ``doStepIf`` predicate applied to every shell step.
        default_step_timeout_in_seconds: Timeout for each shell step.
        tree_stable_timer_in_seconds: Quiet period for the incremental scheduler.
        hour: Hour of day for the nightly scheduler.
        minute: Minute of hour for the nightly scheduler.

    Returns:
        Tuple (builders, schedulers) to be appended to the master config.
    """
    builders = []
    schedulers = []

    platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id)
    local_worker_names = conf.machines.fetch_local_worker_names()

    # Automatic scheduling only exists in production.
    needs_incremental_schedulers = incremental_properties is not None and devops_env_id in ["PROD"]
    needs_nightly_schedulers = nightly_properties is not None and devops_env_id in ["PROD"]

    track_ids = tracked_branch_ids.keys()

    print(f"*** Creating [{artifact_id}] pipeline")

    for track_id in track_ids:
        triggerable_scheduler_names = []
        trigger_factory = buildbot.plugins.util.BuildFactory()

        for worker_group_id, variation in zip(worker_group_ids, variations):
            if variation:
                pipeline_builder_name = f"{track_id}-{artifact_id}-{variation}"
            else:
                pipeline_builder_name = f"{track_id}-{artifact_id}"

            pipeline_build_factory = buildbot.plugins.util.BuildFactory()

            print(f"Creating [{pipeline_builder_name}] pipeline steps")
            for step in steps:
                if callable(step):
                    # Callables provide fully-formed custom steps.
                    pipeline_build_factory.addStep(step())
                    continue

                step_command = create_pipeline_worker_command.withArgs(
                    devops_env_id,
                    track_id,
                    script,
                    step,
                    variation_property,
                    variation,
                    properties,
                )

                pipeline_build_factory.addStep(
                    plugins_steps.ShellCommand(
                        name=step,
                        logEnviron=True,
                        haltOnFailure=True,
                        timeout=default_step_timeout_in_seconds,
                        description="running",
                        descriptionDone="completed",
                        command=step_command,
                        doStepIf=do_step_if,
                    )
                )

            # Create builder.
            pipeline_worker_names = platform_worker_names[worker_group_id]
            if pipeline_worker_names:
                builder_tags = pipeline_builder_name.split("-")

                builders += [
                    buildbot.plugins.util.BuilderConfig(
                        name=pipeline_builder_name,
                        workernames=pipeline_worker_names,
                        tags=builder_tags,
                        factory=pipeline_build_factory,
                    )
                ]

                scheduler_name = f"{pipeline_builder_name}-triggerable"
                triggerable_scheduler_names += [scheduler_name]

                schedulers += [
                    plugins_schedulers.Triggerable(
                        name=scheduler_name, builderNames=[pipeline_builder_name]
                    )
                ]

        # Only create scheduler if we have something to trigger.
        if triggerable_scheduler_names:
            trigger_properties = {}
            for prop in properties:
                # Forward all coordinator properties except the variation
                # selector, which each variation builder receives directly.
                # NOTE: compare by name; ``variation_property`` is a string,
                # while ``prop`` is a parameter object (the previous object
                # comparison never matched).
                if prop.name != variation_property:
                    trigger_properties[prop.name] = buildbot.plugins.util.Property(
                        prop.name
                    )

            trigger_factory.addStep(
                plugins_steps.Trigger(
                    schedulerNames=triggerable_scheduler_names,
                    waitForFinish=True,
                    updateSourceStamp=False,
                    set_properties=trigger_properties,
                    description="running",
                    descriptionDone="completed",
                )
            )

            coordinator_builder_name = f"{track_id}-{artifact_id}-coordinator"
            builder_tags = coordinator_builder_name.split("-")

            builders += [
                buildbot.plugins.util.BuilderConfig(
                    name=coordinator_builder_name,
                    workernames=local_worker_names,
                    tags=builder_tags,
                    factory=trigger_factory,
                )
            ]

            coordinator_scheduler_name = f"{track_id}-{artifact_id}-coordinator-force"
            schedulers += [
                plugins_schedulers.ForceScheduler(
                    name=coordinator_scheduler_name,
                    buttonName="Trigger build",
                    builderNames=[coordinator_builder_name],
                    codebases=[
                        buildbot.plugins.util.CodebaseParameter(
                            codebase="", revision=None, hide=True
                        )
                    ],
                    properties=properties,
                )
            ]

            if needs_incremental_schedulers and (track_id in track_ids):
                incremental_scheduler_name = f"{track_id}-{artifact_id}-coordinator-incremental"
                change_filter = buildbot.plugins.util.ChangeFilter(
                    project=[codebase], branch=tracked_branch_ids[track_id]
                )
                schedulers += [
                    plugins_schedulers.SingleBranchScheduler(
                        name=incremental_scheduler_name,
                        builderNames=[coordinator_builder_name],
                        change_filter=change_filter,
                        properties=incremental_properties,
                        treeStableTimer=tree_stable_timer_in_seconds,
                    )
                ]

            if needs_nightly_schedulers and (track_id in track_ids):
                nightly_codebases = {
                    codebase: {
                        "repository": "",
                        "branch": tracked_branch_ids[track_id],
                        "revision": None,
                    }
                }
                nightly_scheduler_name = f"{track_id}-{artifact_id}-coordinator-nightly"
                schedulers += [
                    plugins_schedulers.Nightly(
                        name=nightly_scheduler_name,
                        builderNames=[coordinator_builder_name],
                        codebases=nightly_codebases,
                        properties=nightly_properties,
                        onlyIfChanged=False,
                        hour=hour,
                        minute=minute,
                    )
                ]

    return builders, schedulers

View file

@ -0,0 +1,54 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import buildbot.plugins
import conf.branches
import pipeline.common
def populate(devops_env_id):
    """Return (builders, schedulers) for the Python API documentation pipeline."""
    properties = [
        buildbot.plugins.util.BooleanParameter(
            name="needs_full_clean",
            label="Full clean -> removes build workspace on machine",
            required=True,
            strict=True,
            default=False,
        ),
        buildbot.plugins.util.BooleanParameter(
            name="needs_package_delivery",
            label="Package delivery -> push build to configured services",
            required=True,
            strict=True,
            default=False,
        ),
    ]

    # Worker-side steps, executed in order by doc_api.py.
    step_names = [
        "configure-machine",
        "update-code",
        "compile-code",
        "compile-install",
        "compile",
        "package",
        "deliver",
        "clean",
    ]

    return pipeline.common.create_pipeline(
        devops_env_id,
        "doc-api",
        "doc_api.py",
        step_names,
        conf.branches.code_tracked_branch_ids,
        properties,
        "blender.git",
        ["linux-x86_64-general"],
        variations=["html"],
        incremental_properties={"needs_package_delivery": False},
        nightly_properties={"needs_package_delivery": True},
        tree_stable_timer_in_seconds=15 * 60,
        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
        hour=1,
        minute=30,
    )

View file

@ -0,0 +1,32 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import buildbot.plugins
import pipeline.common
def populate(devops_env_id):
    """Return (builders, schedulers) for the developer documentation pipeline."""
    package_delivery = buildbot.plugins.util.BooleanParameter(
        name="needs_package_delivery",
        label="Package delivery -> push build to configured services",
        required=True,
        strict=True,
        default=True,
    )

    # Single track: the "vdev" track follows the main branch.
    return pipeline.common.create_pipeline(
        devops_env_id,
        "doc-developer",
        "doc_developer.py",
        ["update", "compile", "deliver"],
        {"vdev": "main"},
        [package_delivery],
        "blender-developer-docs.git",
        ["linux-x86_64-general"],
        incremental_properties={"needs_package_delivery": True},
        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
    )

View file

@ -0,0 +1,44 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import buildbot.plugins
import conf.branches
import pipeline.common
def populate(devops_env_id):
    """Return (builders, schedulers) for the Blender manual pipelines (html and epub)."""
    properties = [
        buildbot.plugins.util.BooleanParameter(
            name="needs_package_delivery",
            label="Package delivery -> push build to configured services",
            required=True,
            strict=True,
            default=True,
        ),
        buildbot.plugins.util.BooleanParameter(
            name="needs_all_locales",
            label="All locales -> process all configure locales",
            required=True,
            strict=True,
            default=False,
        ),
    ]

    # Worker-side steps, executed in order by doc_manual.py.
    step_names = ["configure-machine", "update", "compile", "package", "deliver", "clean"]

    # One worker group entry per output-format variation.
    return pipeline.common.create_pipeline(
        devops_env_id,
        "doc-manual",
        "doc_manual.py",
        step_names,
        conf.branches.code_tracked_branch_ids,
        properties,
        "blender-manual.git",
        ["linux-x86_64-general", "linux-x86_64-general"],
        variation_property="doc_format",
        variations=["html", "epub"],
        incremental_properties={"needs_package_delivery": True, "needs_all_locales": False},
        nightly_properties={"needs_package_delivery": True, "needs_all_locales": True},
        tree_stable_timer_in_seconds=15 * 60,
        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
    )

View file

@ -0,0 +1,32 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>
import buildbot.plugins
import pipeline.common
def populate(devops_env_id):
    """Return (builders, schedulers) for the studio tools documentation pipeline."""
    package_delivery = buildbot.plugins.util.BooleanParameter(
        name="needs_package_delivery",
        label="Package delivery -> push build to configured services",
        required=True,
        strict=True,
        default=True,
    )

    # Single track: the "vdev" track follows the main branch.
    return pipeline.common.create_pipeline(
        devops_env_id,
        "doc-studio-tools",
        "doc_studio.py",
        ["update", "compile", "deliver"],
        {"vdev": "main"},
        [package_delivery],
        "blender-studio-tools.git",
        ["linux-x86_64-doc-studio-tools"],
        incremental_properties={"needs_package_delivery": True},
        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
    )