Port back commits from blender-devops #1

Open
bartvdbraak wants to merge 13 commits from redo into main
32 changed files with 675 additions and 268 deletions
Showing only changes of commit d6bce1b39d

View file

@@ -32,12 +32,18 @@ def fetch_authorization(devops_env_id: str):
     deploy_dev_usernames = auth_config.deploy_dev_usernames
     trusted_dev_usernames = auth_config.trusted_dev_usernames

-    dev_usernames = list(set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames))
+    dev_usernames = list(
+        set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)
+    )
     deploy_usernames = list(set(deploy_dev_usernames + admin_usernames))

     file_based_group_username_role_matchers = [
-        buildbot.plugins.util.RolesFromUsername(roles=["admin"], usernames=admin_usernames),
-        buildbot.plugins.util.RolesFromUsername(roles=["deploy"], usernames=deploy_usernames),
+        buildbot.plugins.util.RolesFromUsername(
+            roles=["admin"], usernames=admin_usernames
+        ),
+        buildbot.plugins.util.RolesFromUsername(
+            roles=["deploy"], usernames=deploy_usernames
+        ),
         buildbot.plugins.util.RolesFromUsername(roles=["dev"], usernames=dev_usernames),
     ]
@@ -52,9 +58,15 @@ def fetch_authorization(devops_env_id: str):
             # buildbot.plugins.util.AnyEndpointMatcher(role='dev', defaultDeny=False),
            # buildbot.plugins.util.AnyEndpointMatcher(role='coordinator', defaultDeny=False),
             # buildbot.plugins.util.AnyEndpointMatcher(role='anonymous', defaultDeny=False),
-            buildbot.plugins.util.StopBuildEndpointMatcher(role="dev", defaultDeny=True),
-            buildbot.plugins.util.RebuildBuildEndpointMatcher(role="dev", defaultDeny=True),
-            buildbot.plugins.util.EnableSchedulerEndpointMatcher(role="admin", defaultDeny=True),
+            buildbot.plugins.util.StopBuildEndpointMatcher(
+                role="dev", defaultDeny=True
+            ),
+            buildbot.plugins.util.RebuildBuildEndpointMatcher(
+                role="dev", defaultDeny=True
+            ),
+            buildbot.plugins.util.EnableSchedulerEndpointMatcher(
+                role="admin", defaultDeny=True
+            ),
             # buildbot.plugins.util.AnyEndpointMatcher(role='any', defaultDeny=False),
             # Force roles
             buildbot.plugins.util.ForceBuildEndpointMatcher(
@@ -95,10 +107,14 @@ def fetch_authorization(devops_env_id: str):
                 builder="*-doc-*", role="dev", defaultDeny=True
             ),
             # This also affects starting jobs via force scheduler
-            buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True),
+            buildbot.plugins.util.AnyControlEndpointMatcher(
+                role="admin", defaultDeny=True
+            ),
             # A default deny for any endpoint if not admin
             # If this is missing at the end, any UNMATCHED group will get 'allow'...
-            buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True),
+            buildbot.plugins.util.AnyControlEndpointMatcher(
+                role="admin", defaultDeny=True
+            ),
         ],
         roleMatchers=file_based_group_username_role_matchers,
     )
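Note on the matcher list above: Buildbot evaluates allowRules in order and stops at the first matching endpoint matcher, so the trailing AnyControlEndpointMatcher is what turns the list into a default-deny policy. A minimal, self-contained sketch of the same pattern (usernames are hypothetical):

# Minimal Authz sketch; usernames are hypothetical, API is buildbot.plugins.util.
from buildbot.plugins import util

authz = util.Authz(
    allowRules=[
        # Specific matchers first: devs may stop and rebuild builds.
        util.StopBuildEndpointMatcher(role="dev", defaultDeny=True),
        util.RebuildBuildEndpointMatcher(role="dev", defaultDeny=True),
        # Catch-all last: any other control endpoint requires the admin role.
        util.AnyControlEndpointMatcher(role="admin", defaultDeny=True),
    ],
    roleMatchers=[
        util.RolesFromUsername(roles=["admin"], usernames=["alice"]),
        util.RolesFromUsername(roles=["dev"], usernames=["alice", "bob"]),
    ],
)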

View file

@@ -31,7 +31,9 @@ def setup_service(devops_env_id: str):
     if gitea_api_token:
         log.msg("Found Gitea API token, enabling status push")
-        return gitea.reporter.GiteaStatusService11(gitea_url, gitea_api_token, verbose=False)
+        return gitea.reporter.GiteaStatusService11(
+            gitea_url, gitea_api_token, verbose=False
+        )
     else:
         log.msg("No Gitea API token found, status push disabled")
         return None

View file

@@ -81,7 +81,9 @@ class GiteaStatusService11(http.ReporterBase):
         yield super().reconfigService(generators=generators, **kwargs)
         self.context = context or Interpolate("buildbot/%(prop:buildername)s")
-        self.context_pr = context_pr or Interpolate("buildbot/pull_request/%(prop:buildername)s")
+        self.context_pr = context_pr or Interpolate(
+            "buildbot/pull_request/%(prop:buildername)s"
+        )
         if baseURL.endswith("/"):
             baseURL = baseURL[:-1]
         self.baseURL = baseURL
@@ -107,7 +109,14 @@ class GiteaStatusService11(http.ReporterBase):
     ]

     def createStatus(
-        self, project_owner, repo_name, sha, state, target_url=None, description=None, context=None
+        self,
+        project_owner,
+        repo_name,
+        sha,
+        state,
+        target_url=None,
+        description=None,
+        context=None,
     ):
         """
         :param project_owner: username of the owning user or organization
@@ -164,19 +173,25 @@ class GiteaStatusService11(http.ReporterBase):
                 log.msg(
                     'Could not send status "{state}" for '
                     "{repo} at {sha}: {code} : {message}".format(
-                        state=state, repo=repository_name, sha=sha, code=res.code, message=message
+                        state=state,
+                        repo=repository_name,
+                        sha=sha,
+                        code=res.code,
+                        message=message,
                     )
                 )
             elif self.verbose:
                 log.msg(
-                    'Status "{state}" sent for '
-                    "{repo} at {sha}.".format(state=state, repo=repository_name, sha=sha)
+                    'Status "{state}" sent for ' "{repo} at {sha}.".format(
+                        state=state, repo=repository_name, sha=sha
+                    )
                 )
         except Exception as e:
             log.err(
                 e,
-                'Failed to send status "{state}" for '
-                "{repo} at {sha}".format(state=state, repo=repository_name, sha=sha),
+                'Failed to send status "{state}" for ' "{repo} at {sha}".format(
+                    state=state, repo=repository_name, sha=sha
+                ),
             )

     @defer.inlineCallbacks

View file

@@ -92,7 +92,9 @@ code_python_module_skip_test_names = ["sign-code-binaries"]
 code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
 code_track_ids = list(code_tracked_branch_ids.keys())
 code_all_platform_architectures = conf.branches.code_all_platform_architectures
-code_official_platform_architectures = conf.branches.code_official_platform_architectures
+code_official_platform_architectures = (
+    conf.branches.code_official_platform_architectures
+)
 code_track_pipeline_types = {}
 track_properties = {}
@@ -211,7 +213,12 @@ scheduler_properties_patch = [
         default=False,
     ),
     buildbot.plugins.util.StringParameter(
-        name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default=""
+        name="pull_revision",
+        label="Pull Revision:",
+        required=False,
+        hide=True,
+        size=80,
+        default="",
     ),
 ]
@@ -225,12 +232,20 @@ scheduler_properties = {
 @buildbot.plugins.util.renderer
-def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_type, step_name):
+def create_code_worker_command_args(
+    props, devops_env_id, track_id, pipeline_type, step_name
+):
     commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
     patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
-    override_branch_id = pipeline.common.fetch_property(props, key="override_branch_id", default="")
-    python_module = pipeline.common.fetch_property(props, key="python_module", default=False)
-    needs_gpu_tests = pipeline.common.fetch_property(props, key="needs_gpu_tests", default=False)
+    override_branch_id = pipeline.common.fetch_property(
+        props, key="override_branch_id", default=""
+    )
+    python_module = pipeline.common.fetch_property(
+        props, key="python_module", default=False
+    )
+    needs_gpu_tests = pipeline.common.fetch_property(
+        props, key="needs_gpu_tests", default=False
+    )
     needs_gpu_binaries = pipeline.common.fetch_property(
         props, key="needs_gpu_binaries", default=False
     )
@@ -279,11 +294,12 @@ def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_typ
     args += [step_name]

-    return pipeline.common.create_worker_command("code.py", devops_env_id, track_id, args)
+    return pipeline.common.create_worker_command(
+        "code.py", devops_env_id, track_id, args
+    )

 def needs_do_code_pipeline_step(step):
-    build = step.build
     # Use this to test master steps only, otherwise we be waiting for 30 minutes
     needs_master_steps_only = False
@@ -291,9 +307,7 @@ def needs_do_code_pipeline_step(step):
         is_master_step = step.name in pipeline.common.code_pipeline_master_step_names
         return is_master_step

-    worker = step.worker
-    worker_name = step.getWorkerName()
-    worker_system = worker.worker_system
+    step.getWorkerName()
     is_package_delivery_step = (step.name in code_delivery_step_names) or (
         step.name in pipeline.common.code_pipeline_master_step_names
@@ -337,7 +351,9 @@ class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload):
 def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
     file_size_in_mb = 500 * 1024 * 1024
-    worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
+    worker_source_path = pathlib.Path(
+        f"../../../../git/blender-{track_id}/build_package"
+    )
     master_dest_path = pathlib.Path(
         f"{worker_config.buildbot_download_folder}/{pipeline_type}"
     ).expanduser()
@@ -359,7 +375,9 @@ def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
 def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
     file_size_in_mb = 500 * 1024 * 1024
-    worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
+    worker_source_path = pathlib.Path(
+        f"../../../../git/blender-{track_id}/build_package"
+    )
     master_dest_path = pathlib.Path(
         f"{worker_config.buildbot_download_folder}/{pipeline_type}"
     ).expanduser()
@@ -456,9 +474,13 @@ def populate(devops_env_id):
         print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps")
         for step_name in step_names:
             if step_name == "deliver-code-binaries":
-                step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type)
+                step = create_deliver_code_binaries_step(
+                    worker_config, track_id, pipeline_type
+                )
             elif step_name == "deliver-test-results":
-                step = create_deliver_test_results_step(worker_config, track_id, pipeline_type)
+                step = create_deliver_test_results_step(
+                    worker_config, track_id, pipeline_type
+                )
             else:
                 needs_halt_on_failure = True
                 if step_name in code_pipeline_test_step_names:
@@ -488,8 +510,14 @@ def populate(devops_env_id):
             pipeline_build_factory.addStep(step)

         for master_step_name in pipeline.common.code_pipeline_master_step_names:
-            master_step_command = pipeline.common.create_master_command_args.withArgs(
-                devops_env_id, track_id, pipeline_type, master_step_name, single_platform=True
+            master_step_command = (
+                pipeline.common.create_master_command_args.withArgs(
+                    devops_env_id,
+                    track_id,
+                    pipeline_type,
+                    master_step_name,
+                    single_platform=True,
+                )
             )

             # Master to archive and purge builds
@@ -528,7 +556,9 @@ def populate(devops_env_id):
         # Create builders.
         for platform_architecture in code_all_platform_architectures[track_id]:
-            print(f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders")
+            print(
+                f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders"
+            )

             worker_group_id = f"{platform_architecture}-code"
             worker_group_id_gpu = f"{platform_architecture}-code-gpu"
@@ -544,30 +574,36 @@ def populate(devops_env_id):
             # Assigning different workers for different tracks, specifically Linux builders.
             suitable_pipeline_worker_names = pipeline_worker_names
-            if platform_architecture == "linux-x86_64" and devops_env_id != "LOCAL":
+            if (
+                platform_architecture == "linux-x86_64"
+                and devops_env_id != "LOCAL"
+            ):
                 selector = "rocky"
                 suitable_pipeline_worker_names = [
-                    worker for worker in pipeline_worker_names if selector in worker
+                    worker
+                    for worker in pipeline_worker_names
+                    if selector in worker
                 ]

             builders += [
                 buildbot.plugins.util.BuilderConfig(
                     name=pipeline_builder_name,
                     workernames=suitable_pipeline_worker_names,
-                    nextWorker=partial(next_worker_code, pipeline_worker_names_gpu),
+                    nextWorker=partial(
+                        next_worker_code, pipeline_worker_names_gpu
+                    ),
                     tags=pipeline_builder_tags,
                     factory=pipeline_build_factory,
                 )
             ]

-            pipeline_scheduler_name = (
-                f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
-            )
+            pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
             triggerable_scheduler_names += [pipeline_scheduler_name]
             schedulers += [
                 plugins_schedulers.Triggerable(
-                    name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
+                    name=pipeline_scheduler_name,
+                    builderNames=[pipeline_builder_name],
                 )
             ]
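The hunk above pins linux-x86_64 builds outside LOCAL to the "rocky" workers by substring, and binds the GPU-capable worker list into the nextWorker hook via functools.partial. A sketch of that pattern (worker names are hypothetical; Buildbot calls the hook as nextWorker(builder, workers, buildrequest)):

from functools import partial

def next_worker_code(gpu_worker_names, builder, workers, buildrequest):
    # Prefer a GPU-capable worker when one is free; attribute access on the
    # candidate objects is illustrative, not taken from this repository.
    for candidate in workers:
        if candidate.worker.workername in gpu_worker_names:
            return candidate
    return workers[0] if workers else None

pipeline_worker_names = ["linux-rocky-01", "linux-ubuntu-02"]  # hypothetical
selector = "rocky"
suitable_pipeline_worker_names = [
    name for name in pipeline_worker_names if selector in name
]
next_worker = partial(next_worker_code, ["linux-rocky-01"])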
@@ -590,12 +626,15 @@ def populate(devops_env_id):
                 )
             ]

-            pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-lint-triggerable"
+            pipeline_scheduler_name = (
+                f"{track_id}-code-{pipeline_type}-lint-triggerable"
+            )
             triggerable_scheduler_names += [pipeline_scheduler_name]
             schedulers += [
                 plugins_schedulers.Triggerable(
-                    name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
+                    name=pipeline_scheduler_name,
+                    builderNames=[pipeline_builder_name],
                 )
             ]
@@ -603,39 +642,55 @@ def populate(devops_env_id):
         if triggerable_scheduler_names:
             trigger_properties = {
                 "python_module": buildbot.plugins.util.Property("python_module"),
-                "needs_full_clean": buildbot.plugins.util.Property("needs_full_clean"),
+                "needs_full_clean": buildbot.plugins.util.Property(
+                    "needs_full_clean"
+                ),
                 "needs_package_delivery": buildbot.plugins.util.Property(
                     "needs_package_delivery"
                 ),
-                "needs_gpu_binaries": buildbot.plugins.util.Property("needs_gpu_binaries"),
-                "needs_gpu_tests": buildbot.plugins.util.Property("needs_gpu_tests"),
-                "needs_skip_tests": buildbot.plugins.util.Property("needs_skip_tests"),
+                "needs_gpu_binaries": buildbot.plugins.util.Property(
+                    "needs_gpu_binaries"
+                ),
+                "needs_gpu_tests": buildbot.plugins.util.Property(
+                    "needs_gpu_tests"
+                ),
+                "needs_skip_tests": buildbot.plugins.util.Property(
+                    "needs_skip_tests"
+                ),
                 "platform_architectures": buildbot.plugins.util.Property(
                     "platform_architectures"
                 ),
             }

             if pipeline_type == "patch":
-                trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id")
-                trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
-                trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
-                    "build_configuration"
+                trigger_properties["patch_id"] = buildbot.plugins.util.Property(
+                    "patch_id"
+                )
+                trigger_properties["revision"] = buildbot.plugins.util.Property(
+                    "revision"
+                )
+                trigger_properties["build_configuration"] = (
+                    buildbot.plugins.util.Property("build_configuration")
                 )
                 trigger_factory.addStep(
                     plugins_steps.SetProperties(
-                        name="get-revision", properties=gitea.blender.get_patch_revision
+                        name="get-revision",
+                        properties=gitea.blender.get_patch_revision,
                     )
                 )
             elif pipeline_type == "experimental":
-                trigger_properties["override_branch_id"] = buildbot.plugins.util.Property(
-                    "override_branch_id"
+                trigger_properties["override_branch_id"] = (
+                    buildbot.plugins.util.Property("override_branch_id")
                 )
-                trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
-                trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
-                    "build_configuration"
+                trigger_properties["revision"] = buildbot.plugins.util.Property(
+                    "revision"
+                )
+                trigger_properties["build_configuration"] = (
+                    buildbot.plugins.util.Property("build_configuration")
                 )
                 trigger_factory.addStep(
                     plugins_steps.SetProperties(
-                        name="get-revision", properties=gitea.blender.get_branch_revision
+                        name="get-revision",
+                        properties=gitea.blender.get_branch_revision,
                     )
                 )
@@ -650,7 +705,9 @@ def populate(devops_env_id):
                 )
             )

-            coordinator_builder_name = f"{track_id}-code-{pipeline_type}-coordinator"
+            coordinator_builder_name = (
+                f"{track_id}-code-{pipeline_type}-coordinator"
+            )
             builder_tags = coordinator_builder_name.split("-")

             builders += [
@@ -662,7 +719,9 @@ def populate(devops_env_id):
                 )
             ]

-            coordinator_scheduler_name = f"{track_id}-code-{pipeline_type}-coordinator-force"
+            coordinator_scheduler_name = (
+                f"{track_id}-code-{pipeline_type}-coordinator-force"
+            )
             schedulers += [
                 plugins_schedulers.ForceScheduler(
                     name=coordinator_scheduler_name,
@@ -701,7 +760,8 @@ def populate(devops_env_id):
             }

             change_filter = buildbot.plugins.util.ChangeFilter(
-                project=["blender.git"], branch=code_tracked_branch_ids[track_id]
+                project=["blender.git"],
+                branch=code_tracked_branch_ids[track_id],
             )
             schedulers += [
                 plugins_schedulers.SingleBranchScheduler(
@@ -724,7 +784,9 @@ def populate(devops_env_id):
                 "needs_package_delivery": True,
                 "needs_gpu_binaries": True,
                 "build_configuration": "release",
-                "platform_architectures": code_all_platform_architectures[track_id],
+                "platform_architectures": code_all_platform_architectures[
+                    track_id
+                ],
             }
             nightly_codebases = {
                 "blender.git": {

View file

@@ -41,7 +41,9 @@ scheduler_properties = [
 def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type):
     # Create step for uploading msix to download.blender.org.
     file_size_in_mb = 500 * 1024 * 1024
-    worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
+    worker_source_path = pathlib.Path(
+        f"../../../../git/blender-{track_id}/build_package"
+    )
     master_dest_path = pathlib.Path(
         f"{worker_config.buildbot_download_folder}/{pipeline_type}"
     ).expanduser()
@@ -122,8 +124,14 @@ def populate(devops_env_id):
             pipeline_build_factory.addStep(step)

         for master_step_name in pipeline.common.code_pipeline_master_step_names:
-            master_step_command = pipeline.common.create_master_command_args.withArgs(
-                devops_env_id, track_id, pipeline_type, master_step_name, single_platform=False
+            master_step_command = (
+                pipeline.common.create_master_command_args.withArgs(
+                    devops_env_id,
+                    track_id,
+                    pipeline_type,
+                    master_step_name,
+                    single_platform=False,
+                )
             )

             # Master to archive and purge builds

View file

@@ -81,9 +81,10 @@ def create_worker_command(script, devops_env_id, track_id, args):
 def create_master_command_args(
     props, devops_env_id, track_id, pipeline_type, step_name, single_platform
 ):
-    build_configuration = fetch_property(props, key="build_configuration", default="release")
+    build_configuration = fetch_property(
+        props, key="build_configuration", default="release"
+    )
     python_module = fetch_property(props, key="python_module", default=False)
-    python_module_string = "true" if python_module else "false"

     args = [
         "--pipeline-type",
@@ -181,8 +182,12 @@ def create_pipeline(
     platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id)
     local_worker_names = conf.machines.fetch_local_worker_names()

-    needs_incremental_schedulers = incremental_properties is not None and devops_env_id in ["PROD"]
-    needs_nightly_schedulers = nightly_properties is not None and devops_env_id in ["PROD"]
+    needs_incremental_schedulers = (
+        incremental_properties is not None and devops_env_id in ["PROD"]
+    )
+    needs_nightly_schedulers = nightly_properties is not None and devops_env_id in [
+        "PROD"
+    ]
     track_ids = tracked_branch_ids.keys()

     print(f"*** Creating [{artifact_id}] pipeline")
@@ -297,7 +302,9 @@ def create_pipeline(
             ]

         if needs_incremental_schedulers and (track_id in track_ids):
-            incremental_scheduler_name = f"{track_id}-{artifact_id}-coordinator-incremental"
+            incremental_scheduler_name = (
+                f"{track_id}-{artifact_id}-coordinator-incremental"
+            )
             change_filter = buildbot.plugins.util.ChangeFilter(
                 project=[codebase], branch=tracked_branch_ids[track_id]
             )
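needs_incremental_schedulers and needs_nightly_schedulers gate the change-driven and nightly schedulers to the PROD environment; other environments build on demand only. A minimal sketch of the two scheduler kinds being gated (names and timings are assumed):

from buildbot.plugins import schedulers, util

incremental = schedulers.SingleBranchScheduler(
    name="vdev-code-coordinator-incremental",  # hypothetical name
    change_filter=util.ChangeFilter(project=["blender.git"], branch="main"),
    treeStableTimer=15 * 60,  # let the tree settle before building
    builderNames=["vdev-code-daily-coordinator"],
)

nightly = schedulers.Nightly(
    name="vdev-code-coordinator-nightly",  # hypothetical name
    builderNames=["vdev-code-daily-coordinator"],
    hour=1, minute=30,  # assumed schedule
)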

View file

@@ -37,7 +37,10 @@ def populate(devops_env_id):
         ["linux-x86_64-general", "linux-x86_64-general"],
         variation_property="doc_format",
         variations=["html", "epub"],
-        incremental_properties={"needs_package_delivery": True, "needs_all_locales": False},
+        incremental_properties={
+            "needs_package_delivery": True,
+            "needs_all_locales": False,
+        },
         nightly_properties={"needs_package_delivery": True, "needs_all_locales": True},
         tree_stable_timer_in_seconds=15 * 60,
         do_step_if=pipeline.common.needs_do_doc_pipeline_step,

View file

@@ -52,7 +52,8 @@ class ArchiveBuilder(worker.utils.Builder):
 def file_age_in_days(file_path: pathlib.Path) -> float:
     try:
         file_path_mtime = os.path.getmtime(file_path)
-    except:
+    except (FileNotFoundError, PermissionError) as e:
+        print(f"Error accessing file: {e}")
         return 0.0

     age_in_seconds = time.time() - file_path_mtime
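The change above narrows a bare except:, which would also have swallowed KeyboardInterrupt and SystemExit, down to the OSError subclasses os.path.getmtime realistically raises here. The fixed helper, as a self-contained sketch:

import os
import pathlib
import time

def file_age_in_days(file_path: pathlib.Path) -> float:
    try:
        file_path_mtime = os.path.getmtime(file_path)
    except (FileNotFoundError, PermissionError) as e:
        # Specific exceptions keep Ctrl-C and interpreter shutdown propagating.
        print(f"Error accessing file: {e}")
        return 0.0
    age_in_seconds = time.time() - file_path_mtime
    return age_in_seconds / (60 * 60 * 24)  # conversion assumed from the name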
@@ -60,7 +61,6 @@ def file_age_in_days(file_path: pathlib.Path) -> float:

 def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]:
-    file_name = file_path.name
     matches = re.match(package_file_pattern, file_path.name)
     if not matches:
         return None
@@ -92,8 +92,6 @@ def fetch_current_builds(
     short_version: Optional[str] = None,
     all_platforms: bool = False,
 ) -> Dict[Any, List[Any]]:
-    app_id = "bpy" if builder.python_module else "blender"
-
     worker_config = builder.get_worker_config()
     download_path = worker_config.buildbot_download_folder
     pipeline_build_path = download_path / pipeline_type
@@ -109,11 +107,16 @@ def fetch_current_builds(
             build_info = parse_build_info(file_path)
             if not build_info:
                 continue
-            if short_version and not build_info["version_id"].startswith(short_version + "."):
+            if short_version and not build_info["version_id"].startswith(
+                short_version + "."
+            ):
                 continue
             if not all_platforms:
-                if builder.architecture and build_info["architecture"] != builder.architecture:
+                if (
+                    builder.architecture
+                    and build_info["architecture"] != builder.architecture
+                ):
                     continue
                 if builder.platform_id and build_info["platform_id"] != builder.platform_id:
                     continue
@@ -174,9 +177,13 @@ def deduplicate(builder: ArchiveBuilder) -> None:
     short_version = branches_config.track_major_minor_versions[builder.track_id]
     if not short_version:
-        raise BaseException(f"Missing version in [{builder.pipeline_type}] builds, aborting")
+        raise BaseException(
+            f"Missing version in [{builder.pipeline_type}] builds, aborting"
+        )

-    build_groups = fetch_current_builds(builder, builder.pipeline_type, short_version=short_version)
+    build_groups = fetch_current_builds(
+        builder, builder.pipeline_type, short_version=short_version
+    )
     print(
         f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] [{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]"
@@ -217,14 +224,20 @@ def purge(builder: ArchiveBuilder) -> None:
         if pipeline_type != "daily":
             print("=" * 120)
             print(f"Deduplicating [{pipeline_type}] builds")
-            build_groups = fetch_current_builds(builder, pipeline_type, all_platforms=True)
+            build_groups = fetch_current_builds(
+                builder, pipeline_type, all_platforms=True
+            )
             for key, build_group in build_groups.items():
                 print("")
                 print("--- Group: " + str(key))
-                archive_build_group(build_group, builds_retention_in_days, dry_run=dry_run)
+                archive_build_group(
+                    build_group, builds_retention_in_days, dry_run=dry_run
+                )

         print("=" * 120)
-        print(f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days")
+        print(
+            f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days"
+        )
         for file_path in fetch_purge_builds(builder, pipeline_type, "archive"):
             if file_age_in_days(file_path) < builds_retention_in_days:
                 continue
@@ -237,7 +250,9 @@ def purge(builder: ArchiveBuilder) -> None:
                 worker.utils.remove_file(checksum_file_path, dry_run=dry_run)

         print("=" * 120)
-        print(f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days")
+        print(
+            f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days"
+        )
         for file_path in fetch_purge_builds(builder, pipeline_type, "tests"):
             if file_age_in_days(file_path) < tests_retention_in_days:
                 continue
@@ -256,7 +271,7 @@ def generate_test_data(builder: ArchiveBuilder) -> None:
     branches_config = builder.get_branches_config()
     short_version = branches_config.track_major_minor_versions[builder.track_id]
-    version = short_version + ".0"
+    short_version + ".0"
     app_id = "bpy" if builder.python_module else "blender"

     commit_hashes = ["1ddf858", "03a2a53"]
@@ -319,9 +334,15 @@ if __name__ == "__main__":
     parser = worker.utils.create_argument_parser(steps=steps)
     parser.add_argument(
-        "--pipeline-type", default="daily", type=str, choices=pipeline_types, required=False
+        "--pipeline-type",
+        default="daily",
+        type=str,
+        choices=pipeline_types,
+        required=False,
     )
-    parser.add_argument("--platform-id", default="", type=str, choices=platforms, required=False)
+    parser.add_argument(
+        "--platform-id", default="", type=str, choices=platforms, required=False
+    )
     parser.add_argument(
         "--architecture", default="", type=str, choices=architectures, required=False
     )

View file

@@ -9,9 +9,6 @@ import pathlib
 import re
 import subprocess

-from collections import OrderedDict
-from typing import Callable, Any
-
 import worker.utils
@@ -32,7 +29,9 @@ class CodeBuilder(worker.utils.Builder):
         self.architecture = args.architecture

         if self.platform == "darwin":
-            self.build_dir = track_path / f"build_{self.architecture}_{self.build_configuration}"
+            self.build_dir = (
+                track_path / f"build_{self.architecture}_{self.build_configuration}"
+            )
         else:
             self.build_dir = track_path / f"build_{self.build_configuration}"
@@ -47,7 +46,9 @@ class CodeBuilder(worker.utils.Builder):
         worker.utils.remove_dir(self.build_doc_path)

     # Call command with in compiler environment.
-    def call(self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None) -> int:
+    def call(
+        self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None
+    ) -> int:
         cmd_prefix: worker.utils.CmdList = []

         if self.platform == "darwin":
@@ -57,11 +58,16 @@ class CodeBuilder(worker.utils.Builder):
             xcode_version = xcode.get("version", None) if xcode else None

             if xcode_version:
-                developer_dir = f"/Applications/Xcode-{xcode_version}.app/Contents/Developer"
+                developer_dir = (
+                    f"/Applications/Xcode-{xcode_version}.app/Contents/Developer"
+                )
             else:
                 developer_dir = "/Applications/Xcode.app/Contents/Developer"

-            if self.service_env_id == "LOCAL" and not pathlib.Path(developer_dir).exists():
+            if (
+                self.service_env_id == "LOCAL"
+                and not pathlib.Path(developer_dir).exists()
+            ):
                 worker.utils.warning(
                     f"Skip using non-existent {developer_dir} in LOCAL service environment"
                 )
@@ -84,7 +90,9 @@ class CodeBuilder(worker.utils.Builder):
         return worker.utils.call(cmd_prefix + list(cmd), env=env)

     def pipeline_config(self) -> dict:
-        config_file_path = self.code_path / "build_files" / "config" / "pipeline_config.json"
+        config_file_path = (
+            self.code_path / "build_files" / "config" / "pipeline_config.json"
+        )
         if not config_file_path.exists():
             config_file_path = config_file_path.with_suffix(".yaml")
             if not config_file_path.exists():
@@ -116,7 +124,9 @@ class CodeBuilder(worker.utils.Builder):
         # CMake goes first to avoid using chocolaty cpack command.
         worker.utils.info("Setting CMake path")
-        os.environ["PATH"] = "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"]
+        os.environ["PATH"] = (
+            "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"]
+        )

         worker.utils.info("Setting VC Tools env variables")
         windows_build_version = "10.0.19041.0"
@@ -126,7 +136,9 @@ class CodeBuilder(worker.utils.Builder):
             + os.environ["PATH"]
         )
         os.environ["PATH"] = (
-            "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + os.pathsep + os.environ["PATH"]
+            "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin"
+            + os.pathsep
+            + os.environ["PATH"]
         )

         if self.architecture == "arm64":
@@ -140,7 +152,9 @@ class CodeBuilder(worker.utils.Builder):
             )
             vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64"

-        vcvars_output = subprocess.check_output([vs_build_tool_path, "&&", "set"], shell=True)
+        vcvars_output = subprocess.check_output(
+            [vs_build_tool_path, "&&", "set"], shell=True
+        )
         vcvars_text = vcvars_output.decode("utf-8", "ignore")

         for line in vcvars_text.splitlines():

View file

@@ -14,7 +14,6 @@ import worker.blender
 import worker.utils

 def create_upload(
     builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
 ) -> None:

View file

@@ -95,7 +95,9 @@ def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int:
     return app_bundles_size + _extra_dmg_size_in_bytes

-def copy_app_bundles(app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path) -> None:
+def copy_app_bundles(
+    app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path
+) -> None:
     """
     Copy all bundles to a given directory
@@ -122,7 +124,9 @@ def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path:

 def create_dmg_image(
-    app_bundles: typing.List[pathlib.Path], dmg_file_path: pathlib.Path, volume_name: str
+    app_bundles: typing.List[pathlib.Path],
+    dmg_file_path: pathlib.Path,
+    volume_name: str,
 ) -> None:
     """
     Create DMG disk image and put app bundles in it
@@ -134,7 +138,9 @@ def create_dmg_image(
         worker.utils.remove_file(dmg_file_path)

     temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-")
-    worker.utils.info(f"Preparing directory with app bundles for the DMG [{temp_content_path}]")
+    worker.utils.info(
+        f"Preparing directory with app bundles for the DMG [{temp_content_path}]"
+    )
     with temp_content_path as content_dir_str:
         # Copy all bundles to a clean directory.
         content_dir_path = pathlib.Path(content_dir_str)
@@ -236,13 +242,17 @@ def eject_volume(volume_name: str) -> None:
         if tokens[1] != "on":
             continue
         if device:
-            raise Exception(f"Multiple devices found for mounting point [{mount_directory}]")
+            raise Exception(
+                f"Multiple devices found for mounting point [{mount_directory}]"
+            )
         device = tokens[0]

     if not device:
         raise Exception(f"No device found for mounting point [{mount_directory}]")

-    worker.utils.info(f"[{mount_directory}] is mounted as device [{device}], ejecting...")
+    worker.utils.info(
+        f"[{mount_directory}] is mounted as device [{device}], ejecting..."
+    )
     command = ["diskutil", "eject", device]
     worker.utils.call(command)
@@ -297,7 +307,9 @@ def run_applescript_file_path(
         needs_run_applescript = True

     if not needs_run_applescript:
-        worker.utils.info(f"Having issues with apple script on [{architecture}], skipping !")
+        worker.utils.info(
+            f"Having issues with apple script on [{architecture}], skipping !"
+        )
         return

     temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript")
@@ -316,8 +328,12 @@ def run_applescript_file_path(
                 if not background_image_file_path:
                     continue
                 else:
-                    background_image_short = f".background:{background_image_file_path.name}"
-                    line = re.sub('to file ".*"', f'to file "{background_image_short}"', line)
+                    background_image_short = (
+                        f".background:{background_image_file_path.name}"
+                    )
+                    line = re.sub(
+                        'to file ".*"', f'to file "{background_image_short}"', line
+                    )
             line = line.replace("blender.app", main_app_bundle.name)
             stripped_line = line.rstrip("\r\n")
             worker.utils.info(f"line={stripped_line}")
@@ -343,7 +359,9 @@ def run_applescript_file_path(
     time.sleep(5)

-def compress_dmg(writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path) -> None:
+def compress_dmg(
+    writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path
+) -> None:
     """
     Compress temporary read-write DMG
     """
@@ -469,5 +487,9 @@ def bundle(
     worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]")

     create_final_dmg(
-        app_bundles, dmg_file_path, background_image_file_path, volume_name, applescript_file_path
+        app_bundles,
+        dmg_file_path,
+        background_image_file_path,
+        volume_name,
+        applescript_file_path,
     )
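compress_dmg, whose signature is reformatted above, converts the temporary read-write image into the final distributable one. A standalone sketch of that step, assuming macOS's hdiutil (the flags are the standard ones, not taken from this repository):

import pathlib
import subprocess

def compress_dmg(writable_dmg: pathlib.Path, final_dmg: pathlib.Path) -> None:
    # Convert the read-write image to a compressed, read-only UDZO image.
    subprocess.check_call(
        ["hdiutil", "convert", str(writable_dmg), "-format", "UDZO", "-o", str(final_dmg)]
    )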

View file

@@ -41,15 +41,21 @@ def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int:
     worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}")

     estimate_memory_per_code_in_bytes = estimate_core_memory_in_mb * 1024 * 1024
-    worker.utils.info(f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}")
+    worker.utils.info(
+        f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}"
+    )

-    capable_cpu_count = int(total_memory_in_bytes / estimate_memory_per_code_in_bytes)
+    capable_cpu_count = int(
+        total_memory_in_bytes / estimate_memory_per_code_in_bytes
+    )
     worker.utils.info(f"capable_cpu_count={capable_cpu_count}")

     min_cpu_count = min(total_cpu_count, capable_cpu_count)
     worker.utils.info(f"min_cpu_count={min_cpu_count}")

-    ideal_cpu_count = min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
+    ideal_cpu_count = (
+        min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
+    )
     worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}")
     return ideal_cpu_count
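fetch_ideal_cpu_count caps build parallelism by the memory each compile job is expected to need, then keeps a few cores free on larger machines. The same computation as a self-contained sketch (psutil is an assumption; the function above only shows its logged values):

import os
import psutil  # assumed; any way to read total RAM works

def ideal_job_count(per_job_mb: int, spare_cpu_count: int = 2) -> int:
    total_cpu_count = os.cpu_count() or 1
    total_memory_in_bytes = psutil.virtual_memory().total
    capable_cpu_count = int(total_memory_in_bytes / (per_job_mb * 1024 * 1024))
    min_cpu_count = min(total_cpu_count, capable_cpu_count)
    # Small machines use every core; bigger ones keep spare cores responsive.
    return min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count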
@@ -88,9 +94,13 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
         platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake"

     if platform_config_file_path:
-        worker.utils.info(f'Trying platform-specific buildbot configuration "{platform_config_file_path}"')
+        worker.utils.info(
+            f'Trying platform-specific buildbot configuration "{platform_config_file_path}"'
+        )
         if (Path(builder.blender_dir) / platform_config_file_path).exists():
-            worker.utils.info(f'Using platform-specific buildbot configuration "{platform_config_file_path}"')
+            worker.utils.info(
+                f'Using platform-specific buildbot configuration "{platform_config_file_path}"'
+            )
             config_file_path = platform_config_file_path
         else:
             worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"')
@@ -145,13 +155,17 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
         vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir)

         if builder.architecture == "arm64":
-            compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
-            compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
-            linker_file_path="C:/Program Files/LLVM/bin/lld-link.exe"
+            compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe"
+            compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe"
+            linker_file_path = "C:/Program Files/LLVM/bin/lld-link.exe"
         else:
             vs_tool_install_dir_suffix = "bin/Hostx64/x64"
-            compiler_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe")
-            linker_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe")
+            compiler_file_path = str(
+                vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe"
+            )
+            linker_file_path = str(
+                vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe"
+            )

         options += ["-G", "Ninja"]
         # -DWITH_WINDOWS_SCCACHE=On
@@ -194,7 +208,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
     for cmake_key in cmake_overrides.keys():
         for restricted_key_pattern in restricted_key_patterns:
             if restricted_key_pattern in cmake_key:
-                raise Exception(f"CMake key [{cmake_key}] cannot be overriden, aborting")
+                raise Exception(
+                    f"CMake key [{cmake_key}] cannot be overriden, aborting"
+                )

     for cmake_key, cmake_value in cmake_overrides.items():
         options += [f"-D{cmake_key}={cmake_value}"]
@@ -238,7 +254,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
             ocloc_version = "dev_01"
         options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"]
         options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"]
-        options += [f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"]
+        options += [
+            f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"
+        ]
     elif builder.platform == "linux":
         # CUDA on Linux
         options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"]
@@ -300,22 +318,20 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
         options += [
             f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
         ]

     # Enable option to verify enabled libraries and features did not get disabled.
     options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"]

-    needs_cuda_compile = builder.needs_gpu_binaries
-
     if builder.needs_gpu_binaries:
         try:
             cuda10_version = buildbotConfig["cuda10"]["version"]
-        except:
+        except KeyError:
             cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"]

         cuda10_folder_version = ".".join(cuda10_version.split(".")[:2])

         try:
             cuda11_version = buildbotConfig["cuda11"]["version"]
-        except:
+        except KeyError:
             cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"]

         cuda11_folder_version = ".".join(cuda11_version.split(".")[:2])
@@ -324,7 +340,7 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
             cuda12_version = buildbotConfig["cuda12"]["version"]
             cuda12_folder_version = ".".join(cuda12_version.split(".")[:2])
             have_cuda12 = True
-        except:
+        except KeyError:
             have_cuda12 = False

     if builder.platform == "windows" and builder.architecture != "arm64":
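The except KeyError changes make the fallback between the two pipeline-config layouts explicit instead of hiding every possible error behind a bare except. The probing pattern, as a sketch (the helper name is hypothetical; the keys are the ones used above):

def fetch_sdk_version(buildbot_config: dict, sdk_key: str) -> str:
    try:
        return buildbot_config[sdk_key]["version"]          # older flat layout
    except KeyError:
        return buildbot_config["sdks"][sdk_key]["version"]  # layout nested under "sdks"

# cuda10_version = fetch_sdk_version(buildbotConfig, "cuda10")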
@@ -408,7 +424,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
             else:
                 # Use new CMake option.
                 options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
-                options += ["-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"]
+                options += [
+                    "-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"
+                ]

             # CUDA 11 or 12.
             if have_cuda12:
@@ -428,7 +446,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe

 def clean_directories(builder: worker.blender.CodeBuilder) -> None:
-    worker.utils.info(f"Cleaning directory [{builder.install_dir})] from the previous run")
+    worker.utils.info(
+        f"Cleaning directory [{builder.install_dir})] from the previous run"
+    )
     worker.utils.remove_dir(builder.install_dir)
     os.makedirs(builder.build_dir, exist_ok=True)
@@ -452,7 +472,9 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
     worker.utils.info("CMake configure options")
     cmake_options = get_cmake_options(builder)
-    cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(cmake_options)
+    cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(
+        cmake_options
+    )
     builder.call(cmd)

     # This hack does not work as expected, since cmake cache is the always updated, we end up recompiling on each compile step, code, gpu and install
@@ -465,7 +487,10 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
         fout = open(tmp_cmake_cache_file_path, "wt")
         for line in fin:
             # worker.utils.info(line)
-            if "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" in line:
+            if (
+                "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND"
+                in line
+            ):
                 worker.utils.warning(
                     "Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] to [/usr/lib64/libpthread.a]"
                 )
@@ -489,7 +514,9 @@ def cmake_build(builder: worker.blender.CodeBuilder, do_install: bool) -> None:
     else:
         estimate_gpu_memory_in_mb = 6000

-    estimate_core_memory_in_mb = estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
+    estimate_core_memory_in_mb = (
+        estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
+    )
     ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb)

     # Enable verbose building to make ninja to output more often.

View file

@@ -82,7 +82,10 @@ def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> N
     for root, dirs, files in os.walk(src):
         package_root = os.path.join(package_name, root[ln:])
         flist.extend(
-            [(os.path.join(root, file), os.path.join(package_root, file)) for file in files]
+            [
+                (os.path.join(root, file), os.path.join(package_root, file))
+                for file in files
+            ]
         )

     # Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
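The comment above explains why create_tar_xz stores every entry with UID/GID 0. With the standard tarfile module the same normalization can be done via a filter callback; a hedged sketch (paths are hypothetical, not this repository's packaging code):

import tarfile

def _owned_by_root(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo:
    tarinfo.uid = tarinfo.gid = 0
    tarinfo.uname = tarinfo.gname = "root"
    return tarinfo

with tarfile.open("blender-package.tar.xz", "w:xz") as tar:
    tar.add("build_package", arcname="blender-package", filter=_owned_by_root)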
@@ -112,7 +115,7 @@ def cleanup_files(dirpath: pathlib.Path, extension: str) -> None:

 def pack_mac(builder: worker.blender.CodeBuilder) -> None:
-    version_info = worker.blender.version.VersionInfo(builder)
+    worker.blender.version.VersionInfo(builder)

     os.chdir(builder.build_dir)
     cleanup_files(builder.package_dir, ".dmg")
@@ -121,15 +124,24 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None:
     package_file_name = package_name + ".dmg"
     package_file_path = builder.package_dir / package_file_name

-    applescript_file_path = pathlib.Path(__file__).parent.resolve() / "blender.applescript"
-    background_image_file_path = builder.blender_dir / "release" / "darwin" / "background.tif"
+    applescript_file_path = (
+        pathlib.Path(__file__).parent.resolve() / "blender.applescript"
+    )
+    background_image_file_path = (
+        builder.blender_dir / "release" / "darwin" / "background.tif"
+    )

     worker.blender.bundle_dmg.bundle(
-        builder.install_dir, package_file_path, applescript_file_path, background_image_file_path
+        builder.install_dir,
+        package_file_path,
+        applescript_file_path,
+        background_image_file_path,
     )

     # Sign
-    worker.blender.sign.sign_darwin_files(builder, [package_file_path], "entitlements.plist")
+    worker.blender.sign.sign_darwin_files(
+        builder, [package_file_path], "entitlements.plist"
+    )

     # Notarize
     worker_config = builder.get_worker_config()
@@ -169,7 +181,14 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None:
         # Show logs
         worker.utils.call(
-            ["xcrun", "notarytool", "log", "--keychain-profile", keychain_profile, request_id],
+            [
+                "xcrun",
+                "notarytool",
+                "log",
+                "--keychain-profile",
+                keychain_profile,
+                request_id,
+            ],
             retry_count=5,
             retry_wait_time=10.0,
         )
@@ -262,14 +281,17 @@ def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None:
             / "ZIP"
             / f"{final_package_file_name}"
         )
-        worker.utils.info(f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]")
+        worker.utils.info(
+            f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]"
+        )
         os.rename(source_cpack_file_path, final_package_file_path)
     else:
         os.rename(bogus_cpack_file_path, final_package_file_path)

     version_info = worker.blender.version.VersionInfo(builder)
     description = f"Blender {version_info.version}"
-    worker.blender.sign.sign_windows_files(builder.service_env_id, [final_package_file_path],
-                                           description=description)
+    worker.blender.sign.sign_windows_files(
+        builder.service_env_id, [final_package_file_path], description=description
+    )

     generate_file_hash(final_package_file_path)
@@ -289,9 +311,13 @@ def pack_linux(builder: worker.blender.CodeBuilder) -> None:
     py_target = builder.install_dir / version_info.short_version
     if not os.path.exists(py_target):
         # Support older format and current issue with 3.00
-        py_target = builder.install_dir / ("%d.%02d" % (version_info.major, version_info.minor))
+        py_target = builder.install_dir / (
+            "%d.%02d" % (version_info.major, version_info.minor)
+        )

-    worker.utils.call(["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"])
+    worker.utils.call(
+        ["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"]
+    )

     package_name = get_package_name(builder)
     package_file_name = f"{package_name}.tar.xz"

View file

@@ -22,7 +22,7 @@ def sign_windows_files(
     worker_config = conf.worker.get_config(service_env_id)

     # TODO: Rotate them if first 1 fails
-    timeserver = worker_config.sign_code_windows_time_servers[0]
+    worker_config.sign_code_windows_time_servers[0]
     server_url = worker_config.sign_code_windows_server_url
     if not certificate_id:
         certificate_id = worker_config.sign_code_windows_certificate
@@ -50,7 +50,9 @@ def sign_windows_files(
     for i in range(0, len(file_paths), chunk_size):
         file_chunks = file_paths[i : i + chunk_size]
-        worker.utils.call(list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run)
+        worker.utils.call(
+            list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run
+        )

 def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None:
@ -97,9 +99,11 @@ def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None:
def sign_darwin_files(
    builder: worker.blender.CodeBuilder,
    file_paths: Sequence[pathlib.Path],
    entitlements_file_name: str,
) -> None:
    entitlements_path = (
        builder.code_path / "release" / "darwin" / entitlements_file_name
    )
    if not entitlements_path.exists():
        raise Exception(f"File {entitlements_path} not found, aborting")
@ -128,7 +132,9 @@ def sign_darwin_files(
        # Remove signature
        if file_path.suffix != ".dmg":
            worker.utils.call(
                ["codesign", "--remove-signature", file_path],
                exit_on_error=False,
                dry_run=dry_run,
            )

        # Add signature
@ -163,11 +169,15 @@ def sign_darwin(builder: worker.blender.CodeBuilder) -> None:
    sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist")

    # Thumbnailer app extension.
    thumbnailer_appex_path = (
        bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex"
    )
    if thumbnailer_appex_path.exists():
        sign_path = thumbnailer_appex_path / "Contents" / "MacOS"
        worker.utils.info(f"Collecting files to process in {sign_path}")
        sign_darwin_files(
            builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist"
        )

    # Shared libraries and Python
    sign_path = bundle_path / "Contents" / "Resources"
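
sign_windows_files above passes files to the signing tool in batches of chunk_size so a single invocation stays under the platform's command-line length limit. The pattern in isolation, as a self-contained sketch (the batch size of 25 is illustrative, not taken from this diff):

from typing import List, Sequence


def chunked(items: Sequence[str], size: int) -> List[Sequence[str]]:
    # Slice the sequence into consecutive batches of at most `size` items.
    return [items[i : i + size] for i in range(0, len(items), size)]


files = [f"file{n}.dll" for n in range(60)]
for batch in chunked(files, 25):
    # Each batch would be appended to one signing command invocation.
    print(len(batch), batch[0], "...", batch[-1])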

View file

@ -36,7 +36,9 @@ def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> No
package_filename = "tests-" + worker.blender.pack.get_package_name(builder) package_filename = "tests-" + worker.blender.pack.get_package_name(builder)
package_filepath = package_tests_dir / package_filename package_filepath = package_tests_dir / package_filename
shutil.copytree(build_tests_dir, package_filepath) shutil.copytree(build_tests_dir, package_filepath)
shutil.make_archive(str(package_filepath), "zip", package_tests_dir, package_filename) shutil.make_archive(
str(package_filepath), "zip", package_tests_dir, package_filename
)
shutil.rmtree(package_filepath) shutil.rmtree(package_filepath)
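
shutil.make_archive has an easy-to-misread signature: the first argument is the archive base name (the ".zip" extension is appended), the third is the directory to change into, and the fourth is what gets archived relative to it. A minimal standalone illustration:

import pathlib
import shutil
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "tests-demo").mkdir()
    (root / "tests-demo" / "result.txt").write_text("ok\n")
    # Produces <tmp>/tests-demo.zip containing the "tests-demo/" folder.
    shutil.make_archive(str(root / "tests-demo"), "zip", root, "tests-demo")
    print(sorted(p.name for p in root.iterdir()))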
    # Always upload unpacked folder for main and release tracks,

View file

@ -32,8 +32,12 @@ def update(builder: worker.blender.CodeBuilder) -> None:
    make_update_text = make_update_path.read_text()
    if "def svn_update" in make_update_text:
        worker.utils.error(
            "Can't build branch or pull request that uses Subversion libraries."
        )
        worker.utils.error(
            "Merge with latest main or release branch to use Git LFS libraries."
        )
        sys.exit(1)

    # Run make update

View file

@ -14,12 +14,18 @@ class VersionInfo:
        # Get version information
        buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h"
        blender_h = (
            builder.blender_dir
            / "source"
            / "blender"
            / "blenkernel"
            / "BKE_blender_version.h"
        )

        version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION"))
        version_number_patch = int(
            self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH")
        )
        self.major, self.minor, self.patch = (
            version_number // 100,
            version_number % 100,
@ -38,14 +44,16 @@ class VersionInfo:
            self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1]
        else:
            self.hash = ""
        self.risk_id = self.version_cycle.replace("release", "stable").replace(
            "rc", "candidate"
        )
        self.is_development_build = self.version_cycle == "alpha"

    def _parse_header_file(self, filename: pathlib.Path, define: str) -> str:
        regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
        with open(filename, "r") as file:
            for line in file:
                match = regex.match(line)
                if match:
                    return match.group(1)
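
The version math packs major and minor into one integer: BLENDER_VERSION 402 means 4.2. A self-contained check of the parsing regex and the decomposition used above:

import re

header = "#define BLENDER_VERSION 402\n#define BLENDER_VERSION_PATCH 3\n"

def parse_define(text: str, define: str) -> str:
    regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define, re.MULTILINE)
    match = regex.search(text)
    assert match is not None
    return match.group(1)

version_number = int(parse_define(header, "BLENDER_VERSION"))
major, minor = version_number // 100, version_number % 100
print(major, minor)  # 4 2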

View file

@ -52,7 +52,9 @@ if __name__ == "__main__":
steps["clean"] = worker.deploy.CodeDeployBuilder.clean steps["clean"] = worker.deploy.CodeDeployBuilder.clean
parser = worker.blender.create_argument_parser(steps=steps) parser = worker.blender.create_argument_parser(steps=steps)
parser.add_argument("--store-id", type=str, choices=["snap", "steam", "windows"], required=True) parser.add_argument(
"--store-id", type=str, choices=["snap", "steam", "windows"], required=True
)
args = parser.parse_args() args = parser.parse_args()
builder = worker.deploy.CodeStoreBuilder(args) builder = worker.deploy.CodeStoreBuilder(args)

View file

@ -22,7 +22,9 @@ def get_os_release() -> str:
def get_cpu_info() -> str:
    if platform.system() == "Darwin":
        return worker.utils.check_output(
            ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
        )
    elif platform.system() == "Linux":
        cpuinfo = pathlib.Path("/proc/cpuinfo").read_text()
        for line in cpuinfo.splitlines():
@ -101,9 +103,9 @@ def clean(builder: worker.utils.Builder) -> None:
    sorted_paths: List[Tuple[float, pathlib.Path]] = []
    for delete_path in optional_delete_paths:
        try:
            sorted_paths.append((os.path.getmtime(delete_path), delete_path))
        except (FileNotFoundError, PermissionError) as e:
            worker.utils.warning(f"Unable to access {delete_path}: {e}")

    for _, delete_path in sorted(sorted_paths):
        worker.utils.remove_dir(delete_path)
@ -128,7 +130,9 @@ def configure_machine(builder: worker.utils.Builder) -> None:
print(f"Release: {get_os_release()}") print(f"Release: {get_os_release()}")
print(f"Version: {platform.version()}") print(f"Version: {platform.version()}")
print(f"Processor: {processor}") print(f"Processor: {processor}")
print(f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical") print(
f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical"
)
print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB") print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB")
print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB") print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB")
@ -194,6 +198,11 @@ def configure_machine(builder: worker.utils.Builder) -> None:
            proc.kill()

    for proc in psutil.process_iter():
        if proc.name().lower() in [
            "blender",
            "blender.exe",
            "blender_test",
            "blender_test.exe",
        ]:
            worker.utils.warning("Killing stray Blender process")
            proc.kill()
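
Processes can exit between process_iter() yielding them and the name()/kill() calls, in which case psutil raises NoSuchProcess. A slightly more defensive variant of the same sweep, as a sketch (the name list is the one used above):

import psutil

TARGETS = {"blender", "blender.exe", "blender_test", "blender_test.exe"}

for proc in psutil.process_iter():
    try:
        if proc.name().lower() in TARGETS:
            proc.kill()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        # The process disappeared or is not ours to touch; skip it.
        continue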

View file

@ -19,10 +19,6 @@ checksums = ["md5", "sha256"]
def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
    pipeline_category = "daily"
    if builder.track_id == "vexp":
        pipeline_category = "experimental"
@ -75,7 +71,11 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
        # Prefer more stable builds, to avoid issues when multiple are present.
        risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]
        risk = build["risk_id"]
        risk = (
            risk_id_order.index(risk)
            if risk in risk_id_order
            else len(risk_id_order)
        )
        other_risk = unique_builds[key]["risk_id"]
        other_risk = (
            risk_id_order.index(other_risk)
@ -92,7 +92,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
    builds = list(unique_builds.values())
    if len(builds) == 0:
        raise Exception(
            f"No builds found for version [{version_info.version}] in [{search_url}]"
        )

    # Download builds.
    worker.utils.remove_dir(builder.download_dir)
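
The index-with-fallback idiom in the risk comparison above turns a preference list into a sortable rank, with unknown values ranked after every known one. In isolation:

risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]

def rank(risk_id: str) -> int:
    # Unknown risk ids rank last.
    return risk_id_order.index(risk_id) if risk_id in risk_id_order else len(risk_id_order)

print(rank("stable"), rank("alpha"), rank("nightly"))  # 0 4 6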
@ -113,7 +115,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
        # Moving to build_package folder
        worker.utils.info(f"Move to [{builder.package_dir}]")
        worker.utils.move(
            download_file_path, builder.package_dir / download_file_path.name
        )

    worker.utils.remove_dir(builder.download_dir)
@ -164,7 +168,9 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
if file_extension == "zip": if file_extension == "zip":
worker.utils.info(f"Renaming internal folder to [{new_folder_name}]") worker.utils.info(f"Renaming internal folder to [{new_folder_name}]")
worker.utils.call(["7z", "rn", dest_file_path, current_folder_name, new_folder_name]) worker.utils.call(
["7z", "rn", dest_file_path, current_folder_name, new_folder_name]
)
elif file_extension == "tar.xz": elif file_extension == "tar.xz":
worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]") worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]")
worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."]) worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."])
@ -198,11 +204,15 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
    for checksum in checksums:
        checksum_text = ""
        for filepath in checksum_file_paths:
            checksum_line = worker.utils.check_output(
                [f"{checksum}sum", filepath.name]
            ).strip()
            checksum_text += checksum_line + "\n"

        print(checksum_text)
        checksum_filepath = (
            deployable_path / f"blender-{version_info.version}.{checksum}"
        )
        checksum_filepath.write_text(checksum_text)
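
The loop shells out to md5sum/sha256sum and concatenates their "<digest>  <name>" lines into one .md5/.sha256 file per release. The same line format can be produced without the external tools; a sketch using hashlib:

import hashlib
import pathlib

def checksum_line(path: pathlib.Path, algorithm: str) -> str:
    digest = hashlib.new(algorithm, path.read_bytes()).hexdigest()
    # Two spaces matches the md5sum/sha256sum output format.
    return f"{digest}  {path.name}"

demo = pathlib.Path("demo.txt")
demo.write_text("hello\n")
print(checksum_line(demo, "md5"))
print(checksum_line(demo, "sha256"))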
@ -218,34 +228,53 @@ def deploy(builder: worker.deploy.CodeDeployBuilder) -> None:
if builder.service_env_id != "PROD": if builder.service_env_id != "PROD":
# Already assumed to exist on production # Already assumed to exist on production
worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) worker.utils.call_ssh(
connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
)
for source_path in builder.package_source_dir.iterdir(): for source_path in builder.package_source_dir.iterdir():
dest_path = f"{connect_id}:{remote_dest_path}/" dest_path = f"{connect_id}:{remote_dest_path}/"
worker.utils.info(f"Deploying source package [{source_path}]") worker.utils.info(f"Deploying source package [{source_path}]")
worker.utils.rsync( worker.utils.rsync(
source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run source_path,
dest_path,
change_modes=change_modes,
show_names=True,
dry_run=dry_run,
) )
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)
# Copy binaries # Copy binaries
version_info = worker.blender.version.VersionInfo(builder) version_info = worker.blender.version.VersionInfo(builder)
major_minor_version = version_info.short_version major_minor_version = version_info.short_version
remote_dest_path = ( remote_dest_path = (
pathlib.Path(worker_config.download_release_folder) / f"Blender{major_minor_version}" pathlib.Path(worker_config.download_release_folder)
/ f"Blender{major_minor_version}"
) )
deployable_path = builder.package_dir / "deployable" deployable_path = builder.package_dir / "deployable"
change_modes = ["F0444"] change_modes = ["F0444"]
worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) worker.utils.call_ssh(
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
)
worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)
for source_path in deployable_path.iterdir(): for source_path in deployable_path.iterdir():
dest_path = f"{connect_id}:{remote_dest_path}/" dest_path = f"{connect_id}:{remote_dest_path}/"
worker.utils.info(f"Deploying binary package [{source_path}]") worker.utils.info(f"Deploying binary package [{source_path}]")
worker.utils.rsync( worker.utils.rsync(
source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run source_path,
dest_path,
change_modes=change_modes,
show_names=True,
dry_run=dry_run,
) )
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)

View file

@ -37,7 +37,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
    stop_on_required_site_found = False

    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
    expected_file_count = len(worker.deploy.artifacts.checksums)
    for expected_platform in expected_platforms:
@ -61,7 +63,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
print(f"Checking [{search_url}] for version [{version_info.version}]") print(f"Checking [{search_url}] for version [{version_info.version}]")
# Header to avoid getting permission denied. # Header to avoid getting permission denied.
request = urllib.request.Request(search_url, headers={"User-Agent": "Mozilla"}) request = urllib.request.Request(
search_url, headers={"User-Agent": "Mozilla"}
)
try: try:
response = urllib.request.urlopen(request, timeout=5.0) response = urllib.request.urlopen(request, timeout=5.0)
@ -71,7 +75,7 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
text = "" text = ""
matches = set(re.findall(file_pattern, text)) matches = set(re.findall(file_pattern, text))
found_file_count = len(matches) len(matches)
for match in matches: for match in matches:
print(f"File [{match}]") print(f"File [{match}]")
@ -93,7 +97,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
print("") print("")
print("=" * 80) print("=" * 80)
print(f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files") print(
f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files"
)
print("=" * 80) print("=" * 80)
if found_site_count == len(monitored_base_urls): if found_site_count == len(monitored_base_urls):

View file

@ -54,7 +54,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
    # Check expected platforms
    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]
    if len(expected_platforms) != len(matching_builds):
        platform_names = "\n".join(expected_platforms)
        raise Exception("Unexpected number of builds, expected:\n" + platform_names)
@ -81,7 +83,9 @@ def deliver(builder: worker.deploy.CodeDeployBuilder) -> None:
    # Check expected platforms
    branches_config = builder.get_branches_config()
    expected_platforms = branches_config.code_official_platform_architectures[
        builder.track_id
    ]

    wheel_names = "\n".join([wheel.name for wheel in wheels])
    wheel_paths = [str(wheel) for wheel in wheels]
    print(wheel_names)

View file

@ -51,14 +51,20 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
    snap_source_root_path = freedesktop_path / "snap"
    blender_icon_file_name = "blender.svg"
    snapcraft_template_file_path = (
        snap_source_root_path / "blender-snapcraft-template.yaml"
    )

    worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]")
    snapcraft_text = snapcraft_template_file_path.read_text()
    snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version)
    snapcraft_text = snapcraft_text.replace("@GRADE@", grade)
    snapcraft_text = snapcraft_text.replace(
        "@ICON_PATH@", f"./{blender_icon_file_name}"
    )
    snapcraft_text = snapcraft_text.replace(
        "@PACKAGE_PATH@", f"./{linux_package_file_path.name}"
    )

    snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml"
    worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]")
@ -74,7 +80,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
    os.chdir(builder.store_snap_dir)

    # Copy all required files into working folder
    source_file_path = (
        freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name
    )
    dest_file_path = builder.store_snap_dir / "blender.svg"
    worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]")
    worker.utils.copy_file(source_file_path, dest_file_path)
@ -87,7 +95,8 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run) worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run)
worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run) worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run)
worker.utils.call( worker.utils.call(
["review-tools.snap-review", snap_package_file_path, "--allow-classic"], dry_run=dry_run ["review-tools.snap-review", snap_package_file_path, "--allow-classic"],
dry_run=dry_run,
) )
if dry_run: if dry_run:
@ -110,11 +119,14 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
    branches_config = builder.get_branches_config()
    is_lts = builder.track_id in branches_config.all_lts_tracks
    is_latest = (
        branches_config.track_major_minor_versions[builder.track_id]
        == version_info.short_version
    )

    # Never push to stable
    snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace(
        "alpha", "edge"
    )
    if snap_risk_id == "stable":
        raise Exception("Delivery to [stable] channel not allowed")
@ -139,7 +151,9 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
    worker_config = builder.get_worker_config()
    env = os.environ.copy()
    env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(
        builder.service_env_id
    )

    # If this fails, then the permissions were not set correctly with ACLs
    worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env)

View file

@ -9,7 +9,9 @@ import worker.deploy
import worker.utils


def _package(
    builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False
) -> None:
    os.chdir(builder.code_path)

    if needs_complete:
        worker.utils.call(["make", "source_archive_complete"])

View file

@ -13,7 +13,9 @@ import worker.utils
def extract_file(
    builder: worker.deploy.CodeStoreBuilder,
    source_file_path: pathlib.Path,
    platform: str,
) -> None:
    worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam")
    if not source_file_path.exists():
@ -33,7 +35,9 @@ def extract_file(
        # Move any folder there as ./content
        for source_content_path in dest_extract_path.iterdir():
            if source_content_path.is_dir():
                worker.utils.info(
                    f"Move [{source_content_path.name}] -> [{dest_content_path}]"
                )
                worker.utils.move(source_content_path, dest_content_path)
                break
@ -55,8 +59,12 @@ def extract_file(
        worker.utils.remove_file(image_file_path)

        worker.utils.info(
            f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]"
        )
        worker.utils.move(
            source_content_path / "Blender.app", dest_content_path / "Blender.app"
        )
        worker.utils.remove_dir(source_content_path)
    elif platform == "windows":
        worker.utils.info(f"Extracting zip file [{source_file_path}]")
@ -66,7 +74,9 @@ def extract_file(
        # Move any folder there as ./content
        for source_content_path in dest_extract_path.iterdir():
            if source_content_path.is_dir():
                worker.utils.info(
                    f"Move [{source_content_path.name}] -> [{dest_content_path}]"
                )
                worker.utils.move(source_content_path, dest_content_path)
                break
    else:
@ -97,9 +107,10 @@ def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None:
    version_info = worker.blender.version.VersionInfo(builder)
    branches_config = builder.get_branches_config()
    is_lts = builder.track_id in branches_config.all_lts_tracks
    is_latest = (
        branches_config.track_major_minor_versions["vdev"] == version_info.short_version
    )

    log_path = builder.track_path / "log"
    worker.utils.remove_dir(log_path)
    os.makedirs(log_path, exist_ok=True)

View file

@ -52,12 +52,16 @@ def _package_architecture(
input_file_path = builder.package_dir / build["file_name"] input_file_path = builder.package_dir / build["file_name"]
break break
if not input_file_path: if not input_file_path:
raise Exception(f"Windows package not found in [{builder.package_dir}] manifest") raise Exception(
f"Windows package not found in [{builder.package_dir}] manifest"
)
# Copy all required files into working folder # Copy all required files into working folder
source_path = builder.code_path / "release" / "windows" / "msix" source_path = builder.code_path / "release" / "windows" / "msix"
dest_path = builder.store_windows_dir dest_path = builder.store_windows_dir
worker.utils.info(f"Copying [{source_path}] -> [{dest_path}] for windows store packaging") worker.utils.info(
f"Copying [{source_path}] -> [{dest_path}] for windows store packaging"
)
for source_file in source_path.iterdir(): for source_file in source_path.iterdir():
if source_file.name == "README.md": if source_file.name == "README.md":
@ -104,7 +108,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
raise Exception("Can only run this on Windows, aborting") raise Exception("Can only run this on Windows, aborting")
branches_config = builder.get_branches_config() branches_config = builder.get_branches_config()
expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] expected_platforms = branches_config.code_official_platform_architectures[
builder.track_id
]
for expected_platform in expected_platforms: for expected_platform in expected_platforms:
if expected_platform.startswith("windows"): if expected_platform.startswith("windows"):

View file

@ -35,7 +35,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None:
api_base_url = "https://docs.blender.org/api" api_base_url = "https://docs.blender.org/api"
api_dump_index_url = f"{api_base_url}/api_dump_index.json" api_dump_index_url = f"{api_base_url}/api_dump_index.json"
request = urllib.request.Request(api_dump_index_url, headers={"User-Agent": "Mozilla"}) request = urllib.request.Request(
api_dump_index_url, headers={"User-Agent": "Mozilla"}
)
response = urllib.request.urlopen(request, timeout=5.0) response = urllib.request.urlopen(request, timeout=5.0)
api_dump_index_text = response.read().decode("utf-8", "ignore") api_dump_index_text = response.read().decode("utf-8", "ignore")
@ -48,7 +50,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None:
        api_dump_url = f"{api_base_url}/{version}/api_dump.json"
        worker.utils.info(f"Download {api_dump_url}")

        request = urllib.request.Request(
            api_dump_url, headers={"User-Agent": "Mozilla"}
        )
        response = urllib.request.urlopen(request, timeout=5.0)
        api_dump_text = response.read().decode("utf-8", "ignore")
@ -97,7 +101,10 @@ def compile_doc(builder: DocApiBuilder) -> None:
    dest_path = api_dump_build_path
    worker.utils.rsync(
        source_path,
        dest_path,
        include_paths=api_dump_include_paths,
        exclude_paths=["*"],
    )

    version = worker.blender.version.VersionInfo(builder).short_version
@ -125,7 +132,9 @@ def compile_doc(builder: DocApiBuilder) -> None:
    in_path = builder.build_doc_path / "sphinx-in"
    out_path = builder.build_doc_path / "sphinx-out-html"
    worker.utils.call(
        ["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path]
    )


def package(builder: DocApiBuilder) -> None:
@ -177,7 +186,9 @@ def deliver(builder: DocApiBuilder) -> None:
    # Put API dumps data on the server.
    api_dump_build_path = f"{builder.build_doc_path}/api_dump/"
    api_dump_dest_path = f"{connect_id}:{remote_path}/"
    worker.utils.rsync(
        api_dump_build_path, api_dump_dest_path, change_modes=change_modes
    )

    # Sync zip package
    if builder.needs_package_delivery:
@ -189,7 +200,10 @@ def deliver(builder: DocApiBuilder) -> None:
        source_file_path = builder.build_doc_path / package_file_name
        dest_file_path = f"{connect_id}:{version_remote_path}/{package_file_name}"
        worker.utils.rsync(
            source_file_path,
            dest_file_path,
            exclude_paths=[".doctrees"],
            change_modes=change_modes,
        )

    # Create links
@ -198,16 +212,19 @@ def deliver(builder: DocApiBuilder) -> None:
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"]
) )
worker.utils.call_ssh( worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "master"] connect_id,
["ln", "-svF", remote_path / dev_version, remote_path / "master"],
) )
worker.utils.call_ssh( worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"] connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"]
) )
worker.utils.call_ssh( worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"] connect_id,
["ln", "-svF", remote_path / latest_version, remote_path / "latest"],
) )
worker.utils.call_ssh( worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "current"] connect_id,
["ln", "-svF", remote_path / latest_version, remote_path / "current"],
) )
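
The ln -svF calls repoint the dev/master/main and latest/current aliases at versioned directories, so published URLs stay stable while releases move underneath them. The idea reproduced locally with plain subprocess (paths are illustrative; -F is the BSD ln flag that lets the link replace an existing directory, omitted here since the link is created fresh):

import pathlib
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / "4.2").mkdir()
    # -s: symbolic, -v: verbose; "latest" now aliases the 4.2 directory.
    subprocess.run(["ln", "-sv", str(root / "4.2"), str(root / "latest")], check=True)
    print((root / "latest").resolve().name)  # 4.2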

View file

@ -29,7 +29,9 @@ def update(builder: DocDeveloperBuilder) -> None:
def compile_doc(builder: DocDeveloperBuilder) -> None:
    os.chdir(builder.track_path)
    worker.utils.call_pipenv(
        ["install", "--requirements", builder.code_path / "requirements.txt"]
    )

    worker.utils.remove_dir(builder.output_path)
@ -48,7 +50,9 @@ def deliver(builder: DocDeveloperBuilder) -> None:
    remote_path = f"developer.blender.org/webroot/{builder.service_env_id}/docs"
    connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}"
    server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path(
        remote_path
    )

    change_modes = ["D0755", "F0644"]
    source_path = f"{builder.output_path}/"

View file

@ -35,7 +35,9 @@ class ManualBuilder(worker.utils.Builder):
        if self.needs_all_locales:
            locale_path = self.code_path / "locale"
            locales += [
                item.name
                for item in locale_path.iterdir()
                if not item.name.startswith(".")
            ]

        return locales
@ -50,7 +52,9 @@ def update(builder: ManualBuilder) -> None:
def check(builder: ManualBuilder) -> None:
    os.chdir(builder.track_path)
    worker.utils.call_pipenv(
        ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]
    )

    os.chdir(builder.code_path)
@ -63,7 +67,9 @@ def check(builder: ManualBuilder) -> None:
def compile_doc(builder: ManualBuilder) -> None:
    # Install requirements.
    os.chdir(builder.track_path)
    worker.utils.call_pipenv(
        ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]
    )

    # Determine format and locales
    locales = builder.get_locales()
@ -127,8 +133,12 @@ def compile_doc(builder: ManualBuilder) -> None:
        # Hack appropriate versions.json URL into version_switch.js
        worker.utils.info("Replacing URL in version_switch.js")
        version_switch_file_path = (
            build_output_path / "_static" / "js" / "version_switch.js"
        )
        versions_file_url = (
            f"https://docs.blender.org/{builder.service_env_id}/versions.json"
        )

        version_switch_text = version_switch_file_path.read_text()
        version_switch_text = version_switch_text.replace(
@ -229,17 +239,24 @@ def deliver(builder: ManualBuilder) -> None:
            dest_path,
            exclude_paths=[".doctrees", "blender_manual_*.zip"],
            delete=True,
            delete_path_check=str(version_remote_path),
        )

        # Create links
        if builder.track_id == "vdev":
            worker.utils.info(f"Creating links for {locale}")
            worker.utils.call_ssh(
                connect_id,
                ["ln", "-svF", remote_path / dev_version, remote_path / "dev"],
            )
            worker.utils.call_ssh(
                connect_id,
                [
                    "ln",
                    "-svF",
                    remote_path / latest_version,
                    remote_path / "latest",
                ],
            )

    if builder.needs_package_delivery:
@ -281,7 +298,11 @@ if __name__ == "__main__":
parser.add_argument("--needs-all-locales", action="store_true", required=False) parser.add_argument("--needs-all-locales", action="store_true", required=False)
parser.add_argument("--needs-package-delivery", action="store_true", required=False) parser.add_argument("--needs-package-delivery", action="store_true", required=False)
parser.add_argument( parser.add_argument(
"--doc-format", default="html", type=str, required=False, choices=["html", "epub"] "--doc-format",
default="html",
type=str,
required=False,
choices=["html", "epub"],
) )
args = parser.parse_args() args = parser.parse_args()

View file

@ -44,7 +44,9 @@ def deliver(builder: worker.utils.Builder) -> None:
change_modes = ["D0755", "F0644"] change_modes = ["D0755", "F0644"]
if builder.service_env_id == "LOCAL" and builder.platform == "darwin": if builder.service_env_id == "LOCAL" and builder.platform == "darwin":
worker.utils.warning("rsync change_owner not supported on darwin, ignoring for LOCAL") worker.utils.warning(
"rsync change_owner not supported on darwin, ignoring for LOCAL"
)
change_owner = None change_owner = None
else: else:
change_owner = "buildbot:www-data" change_owner = "buildbot:www-data"

View file

@ -102,7 +102,7 @@ def _log_cmd(msg: str) -> None:
        _warnings += [msg]
        return
    print(msg.encode("ascii", errors="replace").decode("ascii"), flush=True)


# Command execution
@ -118,7 +118,9 @@ CmdFilterOutput = Optional[Callable[[str], Optional[str]]]
CmdEnvironment = Optional[Dict[str, str]]


def _prepare_call(
    cmd: CmdSequence, dry_run: bool = False
) -> Sequence[Union[str, pathlib.Path]]:
    real_cmd: List[Union[str, pathlib.Path]] = []
    log_cmd: List[str] = []
@ -174,9 +176,9 @@ def call(
            if line:
                line_str = line.strip("\n\r")
                if filter_output:
                    filter_output(line_str)
                else:
                    pass
                if line_str:
                    _log_cmd(line_str)
            else:
@ -206,7 +208,9 @@ def check_output(cmd: CmdSequence, exit_on_error: bool = True) -> str:
    sys.stderr.flush()
    try:
        output = subprocess.check_output(
            cmd, stderr=subprocess.STDOUT, universal_newlines=True
        )
    except subprocess.CalledProcessError as e:
        if exit_on_error:
            sys.exit(e.returncode)
@ -255,9 +259,8 @@ def rsync(
        if str(dest_path).find(delete_path_check) == -1:
            raise Exception(f"Rsync: remote path must contain '{delete_path_check}'")

    if show_names:
        pass

    cmd: List[Union[str, pathlib.Path, HiddenArgument]] = [
        "rsync",
@ -294,20 +297,27 @@ def move(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False)
    shutil.move(str(path_from), path_to)


def copy_dir(
    path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False
) -> None:
    if dry_run:
        return
    shutil.copytree(path_from, path_to)


def copy_file(
    path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False
) -> None:
    if dry_run:
        return
    shutil.copy2(path_from, path_to)


def remove_file(
    path: pathlib.Path,
    retry_count: int = 3,
    retry_wait_time: float = 5.0,
    dry_run: bool = False,
) -> None:
    if not path.exists():
        return
@ -316,52 +326,54 @@ def remove_file(
        return

    info(f"Removing {path}")
    for try_count in range(retry_count):
        try:
            if path.exists():
                path.unlink()
            return
        except FileNotFoundError:
            # File was already removed by another process.
            return
        except PermissionError as e:
            warning(f"Permission error when removing {path}: {e}")
            time.sleep(retry_wait_time)
        except OSError as e:
            warning(f"OS error when removing {path}: {e}")
            time.sleep(retry_wait_time)

    # Final attempt outside the retry loop
    try:
        if path.exists():
            path.unlink()
    except FileNotFoundError:
        pass
    except PermissionError as e:
        error(f"Failed to remove {path} due to permission issues: {e}")
    except OSError as e:
        error(f"Failed to remove {path} after retries due to OS error: {e}")
# Retry several times by default, giving it a chance for possible antivirus to release
# a lock on files in the build folder. Happened for example with MSI files on Windows.
def remove_dir(
    path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0
) -> None:
    for try_count in range(retry_count):
        try:
            if path.exists():
                shutil.rmtree(path)
            return  # Successfully removed, no need to retry
        except PermissionError as e:
            if platform.system().lower() == "windows":
                # Debugging access denied errors on Windows
                if path.name == "build_package":
                    info("Removal of package artifacts folder failed. Investigating...")
                    msi_path = (
                        path
                        / "_CPack_Packages"
                        / "Windows"
                        / "WIX"
                        / "blender-windows64.msi"
                    )
                    if msi_path.exists():
                        info(f"Information about [{msi_path}]")
@ -376,11 +388,23 @@ def remove_dir(
                        )
                    else:
                        info(f"MSI package file [{msi_path}] does not exist")
            warning(f"Permission error when removing {path}: {e}")
            time.sleep(retry_wait_time)
        except FileNotFoundError:
            # The directory is already gone; no action needed.
            return
        except OSError as e:
            warning(f"OS error when attempting to remove {path}: {e}")
            time.sleep(retry_wait_time)

    # Final attempt outside of retries
    if path.exists():
        try:
            shutil.rmtree(path)
        except PermissionError as e:
            error(f"Failed to remove {path} due to permission issues: {e}")
        except OSError as e:
            error(f"Failed to remove {path} after retries due to OS error: {e}")


def is_tool(name: Union[str, pathlib.Path]) -> bool:
@ -409,7 +433,9 @@ def update_source(
warning("Removing git lock, probably left behind by killed git process") warning("Removing git lock, probably left behind by killed git process")
remove_file(index_lock_path) remove_file(index_lock_path)
for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"): for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"):
warning("Removing submodule git lock, probably left behind by killed git process") warning(
"Removing submodule git lock, probably left behind by killed git process"
)
remove_file(index_lock_path) remove_file(index_lock_path)
os.chdir(code_path) os.chdir(code_path)
@ -438,7 +464,15 @@ def update_source(
        # Checkout pull request into PR123 branch.
        call(["git", "checkout", "main"])
        call(
            [
                "git",
                "fetch",
                "-f",
                "origin",
                f"pull/{pull_request_id}/head:{branch_name}",
            ]
        )
        call(["git", "checkout", branch_name])
        if commit_id and (commit_id != "HEAD"):
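
The fetch refspec pull/<id>/head:<branch> materializes a pull request as a local branch before checkout; it works against Gitea and GitHub remotes. The same sequence as a standalone sketch for a hypothetical PR 123, to be run inside a clone whose origin hosts pull requests:

import subprocess

pull_request_id = 123  # hypothetical PR number
branch_name = f"PR{pull_request_id}"

# Fetch the PR head into a local branch, then switch to it.
subprocess.run(
    ["git", "fetch", "-f", "origin", f"pull/{pull_request_id}/head:{branch_name}"],
    check=True,
)
subprocess.run(["git", "checkout", branch_name], check=True)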