Formatting and linting fixes

Bart van der Braak 2024-11-19 21:59:53 +01:00
parent 0a1454d250
commit d6bce1b39d
32 changed files with 675 additions and 268 deletions
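
The hunks below all follow the same pattern: statements that ran past the formatter's line limit are re-wrapped onto multiple lines. A minimal sketch of that transformation, assuming the formatter is Black with its default 88-character line length (the variable names come from the first hunk; the values here are placeholders):

# Hypothetical stand-in values; only the wrapping pattern matters.
deploy_dev_usernames = ["alice"]
trusted_dev_usernames = ["bob"]
admin_usernames = ["carol"]

# Before: a single statement longer than the line limit.
dev_usernames = list(set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames))

# After: the argument moves to its own indented line and the closing
# parenthesis gets a line of its own.
dev_usernames = list(
    set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)
)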


@ -32,12 +32,18 @@ def fetch_authorization(devops_env_id: str):
deploy_dev_usernames = auth_config.deploy_dev_usernames
trusted_dev_usernames = auth_config.trusted_dev_usernames
dev_usernames = list(set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames))
dev_usernames = list(
set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)
)
deploy_usernames = list(set(deploy_dev_usernames + admin_usernames))
file_based_group_username_role_matchers = [
buildbot.plugins.util.RolesFromUsername(roles=["admin"], usernames=admin_usernames),
buildbot.plugins.util.RolesFromUsername(roles=["deploy"], usernames=deploy_usernames),
buildbot.plugins.util.RolesFromUsername(
roles=["admin"], usernames=admin_usernames
),
buildbot.plugins.util.RolesFromUsername(
roles=["deploy"], usernames=deploy_usernames
),
buildbot.plugins.util.RolesFromUsername(roles=["dev"], usernames=dev_usernames),
]
@ -52,9 +58,15 @@ def fetch_authorization(devops_env_id: str):
# buildbot.plugins.util.AnyEndpointMatcher(role='dev', defaultDeny=False),
# buildbot.plugins.util.AnyEndpointMatcher(role='coordinator', defaultDeny=False),
# buildbot.plugins.util.AnyEndpointMatcher(role='anonymous', defaultDeny=False),
buildbot.plugins.util.StopBuildEndpointMatcher(role="dev", defaultDeny=True),
buildbot.plugins.util.RebuildBuildEndpointMatcher(role="dev", defaultDeny=True),
buildbot.plugins.util.EnableSchedulerEndpointMatcher(role="admin", defaultDeny=True),
buildbot.plugins.util.StopBuildEndpointMatcher(
role="dev", defaultDeny=True
),
buildbot.plugins.util.RebuildBuildEndpointMatcher(
role="dev", defaultDeny=True
),
buildbot.plugins.util.EnableSchedulerEndpointMatcher(
role="admin", defaultDeny=True
),
# buildbot.plugins.util.AnyEndpointMatcher(role='any', defaultDeny=False),
# Force roles
buildbot.plugins.util.ForceBuildEndpointMatcher(
@ -95,10 +107,14 @@ def fetch_authorization(devops_env_id: str):
builder="*-doc-*", role="dev", defaultDeny=True
),
# This also affects starting jobs via force scheduler
buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True),
buildbot.plugins.util.AnyControlEndpointMatcher(
role="admin", defaultDeny=True
),
# A default deny for any endpoint if not admin
# If this is missing at the end, any UNMATCHED group will get 'allow'...
buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True),
buildbot.plugins.util.AnyControlEndpointMatcher(
role="admin", defaultDeny=True
),
],
roleMatchers=file_based_group_username_role_matchers,
)


@ -31,7 +31,9 @@ def setup_service(devops_env_id: str):
if gitea_api_token:
log.msg("Found Gitea API token, enabling status push")
return gitea.reporter.GiteaStatusService11(gitea_url, gitea_api_token, verbose=False)
return gitea.reporter.GiteaStatusService11(
gitea_url, gitea_api_token, verbose=False
)
else:
log.msg("No Gitea API token found, status push disabled")
return None


@ -81,7 +81,9 @@ class GiteaStatusService11(http.ReporterBase):
yield super().reconfigService(generators=generators, **kwargs)
self.context = context or Interpolate("buildbot/%(prop:buildername)s")
self.context_pr = context_pr or Interpolate("buildbot/pull_request/%(prop:buildername)s")
self.context_pr = context_pr or Interpolate(
"buildbot/pull_request/%(prop:buildername)s"
)
if baseURL.endswith("/"):
baseURL = baseURL[:-1]
self.baseURL = baseURL
@ -107,7 +109,14 @@ class GiteaStatusService11(http.ReporterBase):
]
def createStatus(
self, project_owner, repo_name, sha, state, target_url=None, description=None, context=None
self,
project_owner,
repo_name,
sha,
state,
target_url=None,
description=None,
context=None,
):
"""
:param project_owner: username of the owning user or organization
@ -164,19 +173,25 @@ class GiteaStatusService11(http.ReporterBase):
log.msg(
'Could not send status "{state}" for '
"{repo} at {sha}: {code} : {message}".format(
state=state, repo=repository_name, sha=sha, code=res.code, message=message
state=state,
repo=repository_name,
sha=sha,
code=res.code,
message=message,
)
)
elif self.verbose:
log.msg(
'Status "{state}" sent for '
"{repo} at {sha}.".format(state=state, repo=repository_name, sha=sha)
'Status "{state}" sent for ' "{repo} at {sha}.".format(
state=state, repo=repository_name, sha=sha
)
)
except Exception as e:
log.err(
e,
'Failed to send status "{state}" for '
"{repo} at {sha}".format(state=state, repo=repository_name, sha=sha),
'Failed to send status "{state}" for ' "{repo} at {sha}".format(
state=state, repo=repository_name, sha=sha
),
)
@defer.inlineCallbacks


@ -92,7 +92,9 @@ code_python_module_skip_test_names = ["sign-code-binaries"]
code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
code_track_ids = list(code_tracked_branch_ids.keys())
code_all_platform_architectures = conf.branches.code_all_platform_architectures
code_official_platform_architectures = conf.branches.code_official_platform_architectures
code_official_platform_architectures = (
conf.branches.code_official_platform_architectures
)
code_track_pipeline_types = {}
track_properties = {}
@ -211,7 +213,12 @@ scheduler_properties_patch = [
default=False,
),
buildbot.plugins.util.StringParameter(
name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default=""
name="pull_revision",
label="Pull Revision:",
required=False,
hide=True,
size=80,
default="",
),
]
@ -225,12 +232,20 @@ scheduler_properties = {
@buildbot.plugins.util.renderer
def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_type, step_name):
def create_code_worker_command_args(
props, devops_env_id, track_id, pipeline_type, step_name
):
commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
override_branch_id = pipeline.common.fetch_property(props, key="override_branch_id", default="")
python_module = pipeline.common.fetch_property(props, key="python_module", default=False)
needs_gpu_tests = pipeline.common.fetch_property(props, key="needs_gpu_tests", default=False)
override_branch_id = pipeline.common.fetch_property(
props, key="override_branch_id", default=""
)
python_module = pipeline.common.fetch_property(
props, key="python_module", default=False
)
needs_gpu_tests = pipeline.common.fetch_property(
props, key="needs_gpu_tests", default=False
)
needs_gpu_binaries = pipeline.common.fetch_property(
props, key="needs_gpu_binaries", default=False
)
@ -279,11 +294,12 @@ def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_typ
args += [step_name]
return pipeline.common.create_worker_command("code.py", devops_env_id, track_id, args)
return pipeline.common.create_worker_command(
"code.py", devops_env_id, track_id, args
)
def needs_do_code_pipeline_step(step):
build = step.build
# Use this to test master steps only, otherwise we'd be waiting for 30 minutes
needs_master_steps_only = False
@ -291,9 +307,7 @@ def needs_do_code_pipeline_step(step):
is_master_step = step.name in pipeline.common.code_pipeline_master_step_names
return is_master_step
worker = step.worker
worker_name = step.getWorkerName()
worker_system = worker.worker_system
step.getWorkerName()
is_package_delivery_step = (step.name in code_delivery_step_names) or (
step.name in pipeline.common.code_pipeline_master_step_names
@ -337,7 +351,9 @@ class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload):
def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
file_size_in_mb = 500 * 1024 * 1024
worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
worker_source_path = pathlib.Path(
f"../../../../git/blender-{track_id}/build_package"
)
master_dest_path = pathlib.Path(
f"{worker_config.buildbot_download_folder}/{pipeline_type}"
).expanduser()
@ -359,7 +375,9 @@ def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type):
def create_deliver_test_results_step(worker_config, track_id, pipeline_type):
file_size_in_mb = 500 * 1024 * 1024
worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
worker_source_path = pathlib.Path(
f"../../../../git/blender-{track_id}/build_package"
)
master_dest_path = pathlib.Path(
f"{worker_config.buildbot_download_folder}/{pipeline_type}"
).expanduser()
@ -456,9 +474,13 @@ def populate(devops_env_id):
print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps")
for step_name in step_names:
if step_name == "deliver-code-binaries":
step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type)
step = create_deliver_code_binaries_step(
worker_config, track_id, pipeline_type
)
elif step_name == "deliver-test-results":
step = create_deliver_test_results_step(worker_config, track_id, pipeline_type)
step = create_deliver_test_results_step(
worker_config, track_id, pipeline_type
)
else:
needs_halt_on_failure = True
if step_name in code_pipeline_test_step_names:
@ -488,8 +510,14 @@ def populate(devops_env_id):
pipeline_build_factory.addStep(step)
for master_step_name in pipeline.common.code_pipeline_master_step_names:
master_step_command = pipeline.common.create_master_command_args.withArgs(
devops_env_id, track_id, pipeline_type, master_step_name, single_platform=True
master_step_command = (
pipeline.common.create_master_command_args.withArgs(
devops_env_id,
track_id,
pipeline_type,
master_step_name,
single_platform=True,
)
)
# Master to archive and purge builds
@ -528,7 +556,9 @@ def populate(devops_env_id):
# Create builders.
for platform_architecture in code_all_platform_architectures[track_id]:
print(f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders")
print(
f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders"
)
worker_group_id = f"{platform_architecture}-code"
worker_group_id_gpu = f"{platform_architecture}-code-gpu"
@ -544,30 +574,36 @@ def populate(devops_env_id):
# Assigning different workers for different tracks, specifically Linux builders.
suitable_pipeline_worker_names = pipeline_worker_names
if platform_architecture == "linux-x86_64" and devops_env_id != "LOCAL":
if (
platform_architecture == "linux-x86_64"
and devops_env_id != "LOCAL"
):
selector = "rocky"
suitable_pipeline_worker_names = [
worker for worker in pipeline_worker_names if selector in worker
worker
for worker in pipeline_worker_names
if selector in worker
]
builders += [
buildbot.plugins.util.BuilderConfig(
name=pipeline_builder_name,
workernames=suitable_pipeline_worker_names,
nextWorker=partial(next_worker_code, pipeline_worker_names_gpu),
nextWorker=partial(
next_worker_code, pipeline_worker_names_gpu
),
tags=pipeline_builder_tags,
factory=pipeline_build_factory,
)
]
pipeline_scheduler_name = (
f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
)
pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable"
triggerable_scheduler_names += [pipeline_scheduler_name]
schedulers += [
plugins_schedulers.Triggerable(
name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
name=pipeline_scheduler_name,
builderNames=[pipeline_builder_name],
)
]
@ -590,12 +626,15 @@ def populate(devops_env_id):
)
]
pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-lint-triggerable"
pipeline_scheduler_name = (
f"{track_id}-code-{pipeline_type}-lint-triggerable"
)
triggerable_scheduler_names += [pipeline_scheduler_name]
schedulers += [
plugins_schedulers.Triggerable(
name=pipeline_scheduler_name, builderNames=[pipeline_builder_name]
name=pipeline_scheduler_name,
builderNames=[pipeline_builder_name],
)
]
@ -603,39 +642,55 @@ def populate(devops_env_id):
if triggerable_scheduler_names:
trigger_properties = {
"python_module": buildbot.plugins.util.Property("python_module"),
"needs_full_clean": buildbot.plugins.util.Property("needs_full_clean"),
"needs_full_clean": buildbot.plugins.util.Property(
"needs_full_clean"
),
"needs_package_delivery": buildbot.plugins.util.Property(
"needs_package_delivery"
),
"needs_gpu_binaries": buildbot.plugins.util.Property("needs_gpu_binaries"),
"needs_gpu_tests": buildbot.plugins.util.Property("needs_gpu_tests"),
"needs_skip_tests": buildbot.plugins.util.Property("needs_skip_tests"),
"needs_gpu_binaries": buildbot.plugins.util.Property(
"needs_gpu_binaries"
),
"needs_gpu_tests": buildbot.plugins.util.Property(
"needs_gpu_tests"
),
"needs_skip_tests": buildbot.plugins.util.Property(
"needs_skip_tests"
),
"platform_architectures": buildbot.plugins.util.Property(
"platform_architectures"
),
}
if pipeline_type == "patch":
trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id")
trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
"build_configuration"
trigger_properties["patch_id"] = buildbot.plugins.util.Property(
"patch_id"
)
trigger_properties["revision"] = buildbot.plugins.util.Property(
"revision"
)
trigger_properties["build_configuration"] = (
buildbot.plugins.util.Property("build_configuration")
)
trigger_factory.addStep(
plugins_steps.SetProperties(
name="get-revision", properties=gitea.blender.get_patch_revision
name="get-revision",
properties=gitea.blender.get_patch_revision,
)
)
elif pipeline_type == "experimental":
trigger_properties["override_branch_id"] = buildbot.plugins.util.Property(
"override_branch_id"
trigger_properties["override_branch_id"] = (
buildbot.plugins.util.Property("override_branch_id")
)
trigger_properties["revision"] = buildbot.plugins.util.Property("revision")
trigger_properties["build_configuration"] = buildbot.plugins.util.Property(
"build_configuration"
trigger_properties["revision"] = buildbot.plugins.util.Property(
"revision"
)
trigger_properties["build_configuration"] = (
buildbot.plugins.util.Property("build_configuration")
)
trigger_factory.addStep(
plugins_steps.SetProperties(
name="get-revision", properties=gitea.blender.get_branch_revision
name="get-revision",
properties=gitea.blender.get_branch_revision,
)
)
@ -650,7 +705,9 @@ def populate(devops_env_id):
)
)
coordinator_builder_name = f"{track_id}-code-{pipeline_type}-coordinator"
coordinator_builder_name = (
f"{track_id}-code-{pipeline_type}-coordinator"
)
builder_tags = coordinator_builder_name.split("-")
builders += [
@ -662,7 +719,9 @@ def populate(devops_env_id):
)
]
coordinator_scheduler_name = f"{track_id}-code-{pipeline_type}-coordinator-force"
coordinator_scheduler_name = (
f"{track_id}-code-{pipeline_type}-coordinator-force"
)
schedulers += [
plugins_schedulers.ForceScheduler(
name=coordinator_scheduler_name,
@ -701,7 +760,8 @@ def populate(devops_env_id):
}
change_filter = buildbot.plugins.util.ChangeFilter(
project=["blender.git"], branch=code_tracked_branch_ids[track_id]
project=["blender.git"],
branch=code_tracked_branch_ids[track_id],
)
schedulers += [
plugins_schedulers.SingleBranchScheduler(
@ -724,7 +784,9 @@ def populate(devops_env_id):
"needs_package_delivery": True,
"needs_gpu_binaries": True,
"build_configuration": "release",
"platform_architectures": code_all_platform_architectures[track_id],
"platform_architectures": code_all_platform_architectures[
track_id
],
}
nightly_codebases = {
"blender.git": {


@ -41,7 +41,9 @@ scheduler_properties = [
def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type):
# Create step for uploading msix to download.blender.org.
file_size_in_mb = 500 * 1024 * 1024
worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package")
worker_source_path = pathlib.Path(
f"../../../../git/blender-{track_id}/build_package"
)
master_dest_path = pathlib.Path(
f"{worker_config.buildbot_download_folder}/{pipeline_type}"
).expanduser()
@ -122,8 +124,14 @@ def populate(devops_env_id):
pipeline_build_factory.addStep(step)
for master_step_name in pipeline.common.code_pipeline_master_step_names:
master_step_command = pipeline.common.create_master_command_args.withArgs(
devops_env_id, track_id, pipeline_type, master_step_name, single_platform=False
master_step_command = (
pipeline.common.create_master_command_args.withArgs(
devops_env_id,
track_id,
pipeline_type,
master_step_name,
single_platform=False,
)
)
# Master to archive and purge builds


@ -81,9 +81,10 @@ def create_worker_command(script, devops_env_id, track_id, args):
def create_master_command_args(
props, devops_env_id, track_id, pipeline_type, step_name, single_platform
):
build_configuration = fetch_property(props, key="build_configuration", default="release")
build_configuration = fetch_property(
props, key="build_configuration", default="release"
)
python_module = fetch_property(props, key="python_module", default=False)
python_module_string = "true" if python_module else "false"
args = [
"--pipeline-type",
@ -181,8 +182,12 @@ def create_pipeline(
platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id)
local_worker_names = conf.machines.fetch_local_worker_names()
needs_incremental_schedulers = incremental_properties is not None and devops_env_id in ["PROD"]
needs_nightly_schedulers = nightly_properties is not None and devops_env_id in ["PROD"]
needs_incremental_schedulers = (
incremental_properties is not None and devops_env_id in ["PROD"]
)
needs_nightly_schedulers = nightly_properties is not None and devops_env_id in [
"PROD"
]
track_ids = tracked_branch_ids.keys()
print(f"*** Creating [{artifact_id}] pipeline")
@ -297,7 +302,9 @@ def create_pipeline(
]
if needs_incremental_schedulers and (track_id in track_ids):
incremental_scheduler_name = f"{track_id}-{artifact_id}-coordinator-incremental"
incremental_scheduler_name = (
f"{track_id}-{artifact_id}-coordinator-incremental"
)
change_filter = buildbot.plugins.util.ChangeFilter(
project=[codebase], branch=tracked_branch_ids[track_id]
)


@ -37,7 +37,10 @@ def populate(devops_env_id):
["linux-x86_64-general", "linux-x86_64-general"],
variation_property="doc_format",
variations=["html", "epub"],
incremental_properties={"needs_package_delivery": True, "needs_all_locales": False},
incremental_properties={
"needs_package_delivery": True,
"needs_all_locales": False,
},
nightly_properties={"needs_package_delivery": True, "needs_all_locales": True},
tree_stable_timer_in_seconds=15 * 60,
do_step_if=pipeline.common.needs_do_doc_pipeline_step,


@ -52,7 +52,8 @@ class ArchiveBuilder(worker.utils.Builder):
def file_age_in_days(file_path: pathlib.Path) -> float:
try:
file_path_mtime = os.path.getmtime(file_path)
except:
except (FileNotFoundError, PermissionError) as e:
print(f"Error accessing file: {e}")
return 0.0
age_in_seconds = time.time() - file_path_mtime
@ -60,7 +61,6 @@ def file_age_in_days(file_path: pathlib.Path) -> float:
def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]:
file_name = file_path.name
matches = re.match(package_file_pattern, file_path.name)
if not matches:
return None
@ -92,8 +92,6 @@ def fetch_current_builds(
short_version: Optional[str] = None,
all_platforms: bool = False,
) -> Dict[Any, List[Any]]:
app_id = "bpy" if builder.python_module else "blender"
worker_config = builder.get_worker_config()
download_path = worker_config.buildbot_download_folder
pipeline_build_path = download_path / pipeline_type
@ -109,11 +107,16 @@ def fetch_current_builds(
build_info = parse_build_info(file_path)
if not build_info:
continue
if short_version and not build_info["version_id"].startswith(short_version + "."):
if short_version and not build_info["version_id"].startswith(
short_version + "."
):
continue
if not all_platforms:
if builder.architecture and build_info["architecture"] != builder.architecture:
if (
builder.architecture
and build_info["architecture"] != builder.architecture
):
continue
if builder.platform_id and build_info["platform_id"] != builder.platform_id:
continue
@ -174,9 +177,13 @@ def deduplicate(builder: ArchiveBuilder) -> None:
short_version = branches_config.track_major_minor_versions[builder.track_id]
if not short_version:
raise BaseException(f"Missing version in [{builder.pipeline_type}] builds, aborting")
raise BaseException(
f"Missing version in [{builder.pipeline_type}] builds, aborting"
)
build_groups = fetch_current_builds(builder, builder.pipeline_type, short_version=short_version)
build_groups = fetch_current_builds(
builder, builder.pipeline_type, short_version=short_version
)
print(
f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] [{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]"
@ -217,14 +224,20 @@ def purge(builder: ArchiveBuilder) -> None:
if pipeline_type != "daily":
print("=" * 120)
print(f"Deduplicating [{pipeline_type}] builds")
build_groups = fetch_current_builds(builder, pipeline_type, all_platforms=True)
build_groups = fetch_current_builds(
builder, pipeline_type, all_platforms=True
)
for key, build_group in build_groups.items():
print("")
print("--- Group: " + str(key))
archive_build_group(build_group, builds_retention_in_days, dry_run=dry_run)
archive_build_group(
build_group, builds_retention_in_days, dry_run=dry_run
)
print("=" * 120)
print(f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days")
print(
f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days"
)
for file_path in fetch_purge_builds(builder, pipeline_type, "archive"):
if file_age_in_days(file_path) < builds_retention_in_days:
continue
@ -237,7 +250,9 @@ def purge(builder: ArchiveBuilder) -> None:
worker.utils.remove_file(checksum_file_path, dry_run=dry_run)
print("=" * 120)
print(f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days")
print(
f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days"
)
for file_path in fetch_purge_builds(builder, pipeline_type, "tests"):
if file_age_in_days(file_path) < tests_retention_in_days:
continue
@ -256,7 +271,7 @@ def generate_test_data(builder: ArchiveBuilder) -> None:
branches_config = builder.get_branches_config()
short_version = branches_config.track_major_minor_versions[builder.track_id]
version = short_version + ".0"
short_version + ".0"
app_id = "bpy" if builder.python_module else "blender"
commit_hashes = ["1ddf858", "03a2a53"]
@ -319,9 +334,15 @@ if __name__ == "__main__":
parser = worker.utils.create_argument_parser(steps=steps)
parser.add_argument(
"--pipeline-type", default="daily", type=str, choices=pipeline_types, required=False
"--pipeline-type",
default="daily",
type=str,
choices=pipeline_types,
required=False,
)
parser.add_argument(
"--platform-id", default="", type=str, choices=platforms, required=False
)
parser.add_argument("--platform-id", default="", type=str, choices=platforms, required=False)
parser.add_argument(
"--architecture", default="", type=str, choices=architectures, required=False
)


@ -9,9 +9,6 @@ import pathlib
import re
import subprocess
from collections import OrderedDict
from typing import Callable, Any
import worker.utils
@ -32,7 +29,9 @@ class CodeBuilder(worker.utils.Builder):
self.architecture = args.architecture
if self.platform == "darwin":
self.build_dir = track_path / f"build_{self.architecture}_{self.build_configuration}"
self.build_dir = (
track_path / f"build_{self.architecture}_{self.build_configuration}"
)
else:
self.build_dir = track_path / f"build_{self.build_configuration}"
@ -47,7 +46,9 @@ class CodeBuilder(worker.utils.Builder):
worker.utils.remove_dir(self.build_doc_path)
# Call command with in compiler environment.
def call(self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None) -> int:
def call(
self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None
) -> int:
cmd_prefix: worker.utils.CmdList = []
if self.platform == "darwin":
@ -57,11 +58,16 @@ class CodeBuilder(worker.utils.Builder):
xcode_version = xcode.get("version", None) if xcode else None
if xcode_version:
developer_dir = f"/Applications/Xcode-{xcode_version}.app/Contents/Developer"
developer_dir = (
f"/Applications/Xcode-{xcode_version}.app/Contents/Developer"
)
else:
developer_dir = "/Applications/Xcode.app/Contents/Developer"
if self.service_env_id == "LOCAL" and not pathlib.Path(developer_dir).exists():
if (
self.service_env_id == "LOCAL"
and not pathlib.Path(developer_dir).exists()
):
worker.utils.warning(
f"Skip using non-existent {developer_dir} in LOCAL service environment"
)
@ -84,7 +90,9 @@ class CodeBuilder(worker.utils.Builder):
return worker.utils.call(cmd_prefix + list(cmd), env=env)
def pipeline_config(self) -> dict:
config_file_path = self.code_path / "build_files" / "config" / "pipeline_config.json"
config_file_path = (
self.code_path / "build_files" / "config" / "pipeline_config.json"
)
if not config_file_path.exists():
config_file_path = config_file_path.with_suffix(".yaml")
if not config_file_path.exists():
@ -116,7 +124,9 @@ class CodeBuilder(worker.utils.Builder):
# CMake goes first to avoid using chocolaty cpack command.
worker.utils.info("Setting CMake path")
os.environ["PATH"] = "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"]
os.environ["PATH"] = (
"C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"]
)
worker.utils.info("Setting VC Tools env variables")
windows_build_version = "10.0.19041.0"
@ -126,7 +136,9 @@ class CodeBuilder(worker.utils.Builder):
+ os.environ["PATH"]
)
os.environ["PATH"] = (
"C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + os.pathsep + os.environ["PATH"]
"C:\\Program Files (x86)\\WiX Toolset v3.11\\bin"
+ os.pathsep
+ os.environ["PATH"]
)
if self.architecture == "arm64":
@ -140,7 +152,9 @@ class CodeBuilder(worker.utils.Builder):
)
vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64"
vcvars_output = subprocess.check_output([vs_build_tool_path, "&&", "set"], shell=True)
vcvars_output = subprocess.check_output(
[vs_build_tool_path, "&&", "set"], shell=True
)
vcvars_text = vcvars_output.decode("utf-8", "ignore")
for line in vcvars_text.splitlines():


@ -14,7 +14,6 @@ import worker.blender
import worker.utils
def create_upload(
builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str
) -> None:


@ -95,7 +95,9 @@ def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int:
return app_bundles_size + _extra_dmg_size_in_bytes
def copy_app_bundles(app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path) -> None:
def copy_app_bundles(
app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path
) -> None:
"""
Copy all bundles to a given directory
@ -122,7 +124,9 @@ def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path:
def create_dmg_image(
app_bundles: typing.List[pathlib.Path], dmg_file_path: pathlib.Path, volume_name: str
app_bundles: typing.List[pathlib.Path],
dmg_file_path: pathlib.Path,
volume_name: str,
) -> None:
"""
Create DMG disk image and put app bundles in it
@ -134,7 +138,9 @@ def create_dmg_image(
worker.utils.remove_file(dmg_file_path)
temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-")
worker.utils.info(f"Preparing directory with app bundles for the DMG [{temp_content_path}]")
worker.utils.info(
f"Preparing directory with app bundles for the DMG [{temp_content_path}]"
)
with temp_content_path as content_dir_str:
# Copy all bundles to a clean directory.
content_dir_path = pathlib.Path(content_dir_str)
@ -236,13 +242,17 @@ def eject_volume(volume_name: str) -> None:
if tokens[1] != "on":
continue
if device:
raise Exception(f"Multiple devices found for mounting point [{mount_directory}]")
raise Exception(
f"Multiple devices found for mounting point [{mount_directory}]"
)
device = tokens[0]
if not device:
raise Exception(f"No device found for mounting point [{mount_directory}]")
worker.utils.info(f"[{mount_directory}] is mounted as device [{device}], ejecting...")
worker.utils.info(
f"[{mount_directory}] is mounted as device [{device}], ejecting..."
)
command = ["diskutil", "eject", device]
worker.utils.call(command)
@ -297,7 +307,9 @@ def run_applescript_file_path(
needs_run_applescript = True
if not needs_run_applescript:
worker.utils.info(f"Having issues with apple script on [{architecture}], skipping !")
worker.utils.info(
f"Having issues with apple script on [{architecture}], skipping !"
)
return
temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript")
@ -316,8 +328,12 @@ def run_applescript_file_path(
if not background_image_file_path:
continue
else:
background_image_short = f".background:{background_image_file_path.name}"
line = re.sub('to file ".*"', f'to file "{background_image_short}"', line)
background_image_short = (
f".background:{background_image_file_path.name}"
)
line = re.sub(
'to file ".*"', f'to file "{background_image_short}"', line
)
line = line.replace("blender.app", main_app_bundle.name)
stripped_line = line.rstrip("\r\n")
worker.utils.info(f"line={stripped_line}")
@ -343,7 +359,9 @@ def run_applescript_file_path(
time.sleep(5)
def compress_dmg(writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path) -> None:
def compress_dmg(
writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path
) -> None:
"""
Compress temporary read-write DMG
"""
@ -469,5 +487,9 @@ def bundle(
worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]")
create_final_dmg(
app_bundles, dmg_file_path, background_image_file_path, volume_name, applescript_file_path
app_bundles,
dmg_file_path,
background_image_file_path,
volume_name,
applescript_file_path,
)


@ -41,15 +41,21 @@ def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int:
worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}")
estimate_memory_per_code_in_bytes = estimate_core_memory_in_mb * 1024 * 1024
worker.utils.info(f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}")
worker.utils.info(
f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}"
)
capable_cpu_count = int(total_memory_in_bytes / estimate_memory_per_code_in_bytes)
capable_cpu_count = int(
total_memory_in_bytes / estimate_memory_per_code_in_bytes
)
worker.utils.info(f"capable_cpu_count={capable_cpu_count}")
min_cpu_count = min(total_cpu_count, capable_cpu_count)
worker.utils.info(f"min_cpu_count={min_cpu_count}")
ideal_cpu_count = min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
ideal_cpu_count = (
min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
)
worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}")
return ideal_cpu_count
@ -88,9 +94,13 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake"
if platform_config_file_path:
worker.utils.info(f'Trying platform-specific buildbot configuration "{platform_config_file_path}"')
worker.utils.info(
f'Trying platform-specific buildbot configuration "{platform_config_file_path}"'
)
if (Path(builder.blender_dir) / platform_config_file_path).exists():
worker.utils.info(f'Using platform-specific buildbot configuration "{platform_config_file_path}"')
worker.utils.info(
f'Using platform-specific buildbot configuration "{platform_config_file_path}"'
)
config_file_path = platform_config_file_path
else:
worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"')
@ -145,13 +155,17 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir)
if builder.architecture == "arm64":
compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe"
linker_file_path="C:/Program Files/LLVM/bin/lld-link.exe"
compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe"
compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe"
linker_file_path = "C:/Program Files/LLVM/bin/lld-link.exe"
else:
vs_tool_install_dir_suffix = "bin/Hostx64/x64"
compiler_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe")
linker_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe")
compiler_file_path = str(
vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe"
)
linker_file_path = str(
vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe"
)
options += ["-G", "Ninja"]
# -DWITH_WINDOWS_SCCACHE=On
@ -194,7 +208,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
for cmake_key in cmake_overrides.keys():
for restricted_key_pattern in restricted_key_patterns:
if restricted_key_pattern in cmake_key:
raise Exception(f"CMake key [{cmake_key}] cannot be overriden, aborting")
raise Exception(
f"CMake key [{cmake_key}] cannot be overriden, aborting"
)
for cmake_key, cmake_value in cmake_overrides.items():
options += [f"-D{cmake_key}={cmake_value}"]
@ -238,7 +254,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
ocloc_version = "dev_01"
options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"]
options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"]
options += [f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"]
options += [
f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"
]
elif builder.platform == "linux":
# CUDA on Linux
options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"]
@ -300,22 +318,20 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
options += [
f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
]
# Enable option to verify enabled libraries and features did not get disabled.
options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"]
needs_cuda_compile = builder.needs_gpu_binaries
if builder.needs_gpu_binaries:
try:
cuda10_version = buildbotConfig["cuda10"]["version"]
except:
except KeyError:
cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"]
cuda10_folder_version = ".".join(cuda10_version.split(".")[:2])
try:
cuda11_version = buildbotConfig["cuda11"]["version"]
except:
except KeyError:
cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"]
cuda11_folder_version = ".".join(cuda11_version.split(".")[:2])
@ -324,7 +340,7 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
cuda12_version = buildbotConfig["cuda12"]["version"]
cuda12_folder_version = ".".join(cuda12_version.split(".")[:2])
have_cuda12 = True
except:
except KeyError:
have_cuda12 = False
if builder.platform == "windows" and builder.architecture != "arm64":
@ -408,7 +424,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
else:
# Use new CMake option.
options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
options += ["-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"]
options += [
"-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"
]
# CUDA 11 or 12.
if have_cuda12:
@ -428,7 +446,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe
def clean_directories(builder: worker.blender.CodeBuilder) -> None:
worker.utils.info(f"Cleaning directory [{builder.install_dir})] from the previous run")
worker.utils.info(
f"Cleaning directory [{builder.install_dir})] from the previous run"
)
worker.utils.remove_dir(builder.install_dir)
os.makedirs(builder.build_dir, exist_ok=True)
@ -452,7 +472,9 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
worker.utils.info("CMake configure options")
cmake_options = get_cmake_options(builder)
cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(cmake_options)
cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(
cmake_options
)
builder.call(cmd)
# This hack does not work as expected, since the cmake cache is always updated, we end up recompiling on each compile step, code, gpu and install
@ -465,7 +487,10 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
fout = open(tmp_cmake_cache_file_path, "wt")
for line in fin:
# worker.utils.info(line)
if "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" in line:
if (
"OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND"
in line
):
worker.utils.warning(
"Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] to [/usr/lib64/libpthread.a]"
)
@ -489,7 +514,9 @@ def cmake_build(builder: worker.blender.CodeBuilder, do_install: bool) -> None:
else:
estimate_gpu_memory_in_mb = 6000
estimate_core_memory_in_mb = estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
estimate_core_memory_in_mb = (
estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
)
ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb)
# Enable verbose building to make ninja output more often.


@ -82,7 +82,10 @@ def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> N
for root, dirs, files in os.walk(src):
package_root = os.path.join(package_name, root[ln:])
flist.extend(
[(os.path.join(root, file), os.path.join(package_root, file)) for file in files]
[
(os.path.join(root, file), os.path.join(package_root, file))
for file in files
]
)
# Set UID/GID of archived files to 0, otherwise they'd be owned by whatever
@ -112,7 +115,7 @@ def cleanup_files(dirpath: pathlib.Path, extension: str) -> None:
def pack_mac(builder: worker.blender.CodeBuilder) -> None:
version_info = worker.blender.version.VersionInfo(builder)
worker.blender.version.VersionInfo(builder)
os.chdir(builder.build_dir)
cleanup_files(builder.package_dir, ".dmg")
@ -121,15 +124,24 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None:
package_file_name = package_name + ".dmg"
package_file_path = builder.package_dir / package_file_name
applescript_file_path = pathlib.Path(__file__).parent.resolve() / "blender.applescript"
background_image_file_path = builder.blender_dir / "release" / "darwin" / "background.tif"
applescript_file_path = (
pathlib.Path(__file__).parent.resolve() / "blender.applescript"
)
background_image_file_path = (
builder.blender_dir / "release" / "darwin" / "background.tif"
)
worker.blender.bundle_dmg.bundle(
builder.install_dir, package_file_path, applescript_file_path, background_image_file_path
builder.install_dir,
package_file_path,
applescript_file_path,
background_image_file_path,
)
# Sign
worker.blender.sign.sign_darwin_files(builder, [package_file_path], "entitlements.plist")
worker.blender.sign.sign_darwin_files(
builder, [package_file_path], "entitlements.plist"
)
# Notarize
worker_config = builder.get_worker_config()
@ -169,7 +181,14 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None:
# Show logs
worker.utils.call(
["xcrun", "notarytool", "log", "--keychain-profile", keychain_profile, request_id],
[
"xcrun",
"notarytool",
"log",
"--keychain-profile",
keychain_profile,
request_id,
],
retry_count=5,
retry_wait_time=10.0,
)
@ -262,14 +281,17 @@ def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None:
/ "ZIP"
/ f"{final_package_file_name}"
)
worker.utils.info(f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]")
worker.utils.info(
f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]"
)
os.rename(source_cpack_file_path, final_package_file_path)
else:
os.rename(bogus_cpack_file_path, final_package_file_path)
version_info = worker.blender.version.VersionInfo(builder)
description = f"Blender {version_info.version}"
worker.blender.sign.sign_windows_files(builder.service_env_id, [final_package_file_path],
description=description)
worker.blender.sign.sign_windows_files(
builder.service_env_id, [final_package_file_path], description=description
)
generate_file_hash(final_package_file_path)
@ -289,9 +311,13 @@ def pack_linux(builder: worker.blender.CodeBuilder) -> None:
py_target = builder.install_dir / version_info.short_version
if not os.path.exists(py_target):
# Support older format and current issue with 3.00
py_target = builder.install_dir / ("%d.%02d" % (version_info.major, version_info.minor))
py_target = builder.install_dir / (
"%d.%02d" % (version_info.major, version_info.minor)
)
worker.utils.call(["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"])
worker.utils.call(
["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"]
)
package_name = get_package_name(builder)
package_file_name = f"{package_name}.tar.xz"


@ -22,7 +22,7 @@ def sign_windows_files(
worker_config = conf.worker.get_config(service_env_id)
# TODO: Rotate them if first 1 fails
timeserver = worker_config.sign_code_windows_time_servers[0]
worker_config.sign_code_windows_time_servers[0]
server_url = worker_config.sign_code_windows_server_url
if not certificate_id:
certificate_id = worker_config.sign_code_windows_certificate
@ -50,7 +50,9 @@ def sign_windows_files(
for i in range(0, len(file_paths), chunk_size):
file_chunks = file_paths[i : i + chunk_size]
worker.utils.call(list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run)
worker.utils.call(
list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run
)
def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None:
@ -97,9 +99,11 @@ def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None:
def sign_darwin_files(
builder: worker.blender.CodeBuilder,
file_paths: Sequence[pathlib.Path],
entitlements_file_name: str
entitlements_file_name: str,
) -> None:
entitlements_path = builder.code_path / "release" / "darwin" / entitlements_file_name
entitlements_path = (
builder.code_path / "release" / "darwin" / entitlements_file_name
)
if not entitlements_path.exists():
raise Exception(f"File {entitlements_path} not found, aborting")
@ -128,7 +132,9 @@ def sign_darwin_files(
# Remove signature
if file_path.suffix != ".dmg":
worker.utils.call(
["codesign", "--remove-signature", file_path], exit_on_error=False, dry_run=dry_run
["codesign", "--remove-signature", file_path],
exit_on_error=False,
dry_run=dry_run,
)
# Add signature
@ -163,11 +169,15 @@ def sign_darwin(builder: worker.blender.CodeBuilder) -> None:
sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist")
# Thumbnailer app extension.
thumbnailer_appex_path = bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex"
thumbnailer_appex_path = (
bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex"
)
if thumbnailer_appex_path.exists():
sign_path = thumbnailer_appex_path / "Contents" / "MacOS"
worker.utils.info(f"Collecting files to process in {sign_path}")
sign_darwin_files(builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist")
sign_darwin_files(
builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist"
)
# Shared librarys and Python
sign_path = bundle_path / "Contents" / "Resources"


@ -36,7 +36,9 @@ def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> No
package_filename = "tests-" + worker.blender.pack.get_package_name(builder)
package_filepath = package_tests_dir / package_filename
shutil.copytree(build_tests_dir, package_filepath)
shutil.make_archive(str(package_filepath), "zip", package_tests_dir, package_filename)
shutil.make_archive(
str(package_filepath), "zip", package_tests_dir, package_filename
)
shutil.rmtree(package_filepath)
# Always upload unpacked folder for main and release tracks,


@ -32,8 +32,12 @@ def update(builder: worker.blender.CodeBuilder) -> None:
make_update_text = make_update_path.read_text()
if "def svn_update" in make_update_text:
worker.utils.error("Can't build branch or pull request that uses Subversion libraries.")
worker.utils.error("Merge with latest main or release branch to use Git LFS libraries.")
worker.utils.error(
"Can't build branch or pull request that uses Subversion libraries."
)
worker.utils.error(
"Merge with latest main or release branch to use Git LFS libraries."
)
sys.exit(1)
# Run make update


@ -14,12 +14,18 @@ class VersionInfo:
# Get version information
buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h"
blender_h = (
builder.blender_dir / "source" / "blender" / "blenkernel" / "BKE_blender_version.h"
builder.blender_dir
/ "source"
/ "blender"
/ "blenkernel"
/ "BKE_blender_version.h"
)
version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION"))
version_number_patch = int(self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH"))
version_number_patch = int(
self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH")
)
self.major, self.minor, self.patch = (
version_number // 100,
version_number % 100,
@ -38,14 +44,16 @@ class VersionInfo:
self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1]
else:
self.hash = ""
self.risk_id = self.version_cycle.replace("release", "stable").replace("rc", "candidate")
self.risk_id = self.version_cycle.replace("release", "stable").replace(
"rc", "candidate"
)
self.is_development_build = self.version_cycle == "alpha"
def _parse_header_file(self, filename: pathlib.Path, define: str) -> str:
regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
with open(filename, "r") as file:
for l in file:
match = regex.match(l)
for line in file:
match = regex.match(line)
if match:
return match.group(1)


@ -52,7 +52,9 @@ if __name__ == "__main__":
steps["clean"] = worker.deploy.CodeDeployBuilder.clean
parser = worker.blender.create_argument_parser(steps=steps)
parser.add_argument("--store-id", type=str, choices=["snap", "steam", "windows"], required=True)
parser.add_argument(
"--store-id", type=str, choices=["snap", "steam", "windows"], required=True
)
args = parser.parse_args()
builder = worker.deploy.CodeStoreBuilder(args)


@ -22,7 +22,9 @@ def get_os_release() -> str:
def get_cpu_info() -> str:
if platform.system() == "Darwin":
return worker.utils.check_output(["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"])
return worker.utils.check_output(
["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]
)
elif platform.system() == "Linux":
cpuinfo = pathlib.Path("/proc/cpuinfo").read_text()
for line in cpuinfo.splitlines():
@ -101,9 +103,9 @@ def clean(builder: worker.utils.Builder) -> None:
sorted_paths: List[Tuple[float, pathlib.Path]] = []
for delete_path in optional_delete_paths:
try:
sorted_paths += [(os.path.getmtime(delete_path), delete_path)]
except:
pass
sorted_paths.append((os.path.getmtime(delete_path), delete_path))
except (FileNotFoundError, PermissionError) as e:
worker.utils.warning(f"Unable to access {delete_path}: {e}")
for _, delete_path in sorted(sorted_paths):
worker.utils.remove_dir(delete_path)
@ -128,7 +130,9 @@ def configure_machine(builder: worker.utils.Builder) -> None:
print(f"Release: {get_os_release()}")
print(f"Version: {platform.version()}")
print(f"Processor: {processor}")
print(f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical")
print(
f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical"
)
print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB")
print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB")
@ -194,6 +198,11 @@ def configure_machine(builder: worker.utils.Builder) -> None:
proc.kill()
for proc in psutil.process_iter():
if proc.name().lower() in ["blender", "blender.exe", "blender_test", "blender_test.exe"]:
if proc.name().lower() in [
"blender",
"blender.exe",
"blender_test",
"blender_test.exe",
]:
worker.utils.warning("Killing stray Blender process")
proc.kill()


@ -19,10 +19,6 @@ checksums = ["md5", "sha256"]
def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
retry_count = 0
retry_delay_in_seconds = 30
timeout_in_seconds = 60
pipeline_category = "daily"
if builder.track_id == "vexp":
pipeline_category = "experimental"
@ -75,7 +71,11 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
# Prefer more stable builds, to avoid issue when multiple are present.
risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]
risk = build["risk_id"]
risk = risk_id_order.index(risk) if risk in risk_id_order else len(risk_id_order)
risk = (
risk_id_order.index(risk)
if risk in risk_id_order
else len(risk_id_order)
)
other_risk = unique_builds[key]["risk_id"]
other_risk = (
risk_id_order.index(other_risk)
@ -92,7 +92,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
builds = list(unique_builds.values())
if len(builds) == 0:
raise Exception(f"No builds found for version [{version_info.version}] in [{search_url}]")
raise Exception(
f"No builds found for version [{version_info.version}] in [{search_url}]"
)
# Download builds.
worker.utils.remove_dir(builder.download_dir)
@ -113,7 +115,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
# Moving to build_package folder
worker.utils.info(f"Move to [{builder.package_dir}]")
worker.utils.move(download_file_path, builder.package_dir / download_file_path.name)
worker.utils.move(
download_file_path, builder.package_dir / download_file_path.name
)
worker.utils.remove_dir(builder.download_dir)
@ -164,7 +168,9 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
if file_extension == "zip":
worker.utils.info(f"Renaming internal folder to [{new_folder_name}]")
worker.utils.call(["7z", "rn", dest_file_path, current_folder_name, new_folder_name])
worker.utils.call(
["7z", "rn", dest_file_path, current_folder_name, new_folder_name]
)
elif file_extension == "tar.xz":
worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]")
worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."])
@ -198,11 +204,15 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
for checksum in checksums:
checksum_text = ""
for filepath in checksum_file_paths:
checksum_line = worker.utils.check_output([f"{checksum}sum", filepath.name]).strip()
checksum_line = worker.utils.check_output(
[f"{checksum}sum", filepath.name]
).strip()
checksum_text += checksum_line + "\n"
print(checksum_text)
checksum_filepath = deployable_path / f"blender-{version_info.version}.{checksum}"
checksum_filepath = (
deployable_path / f"blender-{version_info.version}.{checksum}"
)
checksum_filepath.write_text(checksum_text)
@ -218,34 +228,53 @@ def deploy(builder: worker.deploy.CodeDeployBuilder) -> None:
if builder.service_env_id != "PROD":
# Already assumed to exist on production
worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run)
worker.utils.call_ssh(
connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
)
for source_path in builder.package_source_dir.iterdir():
dest_path = f"{connect_id}:{remote_dest_path}/"
worker.utils.info(f"Deploying source package [{source_path}]")
worker.utils.rsync(
source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run
source_path,
dest_path,
change_modes=change_modes,
show_names=True,
dry_run=dry_run,
)
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run)
worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)
# Copy binaries
version_info = worker.blender.version.VersionInfo(builder)
major_minor_version = version_info.short_version
remote_dest_path = (
pathlib.Path(worker_config.download_release_folder) / f"Blender{major_minor_version}"
pathlib.Path(worker_config.download_release_folder)
/ f"Blender{major_minor_version}"
)
deployable_path = builder.package_dir / "deployable"
change_modes = ["F0444"]
worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run)
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run)
worker.utils.call_ssh(
connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run
)
worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)
for source_path in deployable_path.iterdir():
dest_path = f"{connect_id}:{remote_dest_path}/"
worker.utils.info(f"Deploying binary package [{source_path}]")
worker.utils.rsync(
source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run
source_path,
dest_path,
change_modes=change_modes,
show_names=True,
dry_run=dry_run,
)
worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run)
worker.utils.call_ssh(
connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run
)


@ -37,7 +37,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
stop_on_required_site_found = False
branches_config = builder.get_branches_config()
expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
expected_platforms = branches_config.code_official_platform_architectures[
builder.track_id
]
expected_file_count = len(worker.deploy.artifacts.checksums)
for expected_platform in expected_platforms:
@ -61,7 +63,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
print(f"Checking [{search_url}] for version [{version_info.version}]")
# Header to avoid getting permission denied.
request = urllib.request.Request(search_url, headers={"User-Agent": "Mozilla"})
request = urllib.request.Request(
search_url, headers={"User-Agent": "Mozilla"}
)
try:
response = urllib.request.urlopen(request, timeout=5.0)
@ -71,7 +75,7 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
text = ""
matches = set(re.findall(file_pattern, text))
found_file_count = len(matches)
len(matches)
for match in matches:
print(f"File [{match}]")
@ -93,7 +97,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None:
print("")
print("=" * 80)
print(f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files")
print(
f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files"
)
print("=" * 80)
if found_site_count == len(monitored_base_urls):


@ -54,7 +54,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None:
# Check expected platforms
branches_config = builder.get_branches_config()
expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
expected_platforms = branches_config.code_official_platform_architectures[
builder.track_id
]
if len(expected_platforms) != len(matching_builds):
platform_names = "\n".join(expected_platforms)
raise Exception("Unexpected number of builds, expected:\n" + platform_names)
@ -81,7 +83,9 @@ def deliver(builder: worker.deploy.CodeDeployBuilder) -> None:
# Check expected platforms
branches_config = builder.get_branches_config()
expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
expected_platforms = branches_config.code_official_platform_architectures[
builder.track_id
]
wheel_names = "\n".join([wheel.name for wheel in wheels])
wheel_paths = [str(wheel) for wheel in wheels]
print(wheel_names)


@ -51,14 +51,20 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
snap_source_root_path = freedesktop_path / "snap"
blender_icon_file_name = "blender.svg"
snapcraft_template_file_path = snap_source_root_path / "blender-snapcraft-template.yaml"
snapcraft_template_file_path = (
snap_source_root_path / "blender-snapcraft-template.yaml"
)
worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]")
snapcraft_text = snapcraft_template_file_path.read_text()
snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version)
snapcraft_text = snapcraft_text.replace("@GRADE@", grade)
snapcraft_text = snapcraft_text.replace("@ICON_PATH@", f"./{blender_icon_file_name}")
snapcraft_text = snapcraft_text.replace("@PACKAGE_PATH@", f"./{linux_package_file_path.name}")
snapcraft_text = snapcraft_text.replace(
"@ICON_PATH@", f"./{blender_icon_file_name}"
)
snapcraft_text = snapcraft_text.replace(
"@PACKAGE_PATH@", f"./{linux_package_file_path.name}"
)
snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml"
worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]")
@ -74,7 +80,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
os.chdir(builder.store_snap_dir)
# Copy all required files into working folder
source_file_path = freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name
source_file_path = (
freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name
)
dest_file_path = builder.store_snap_dir / "blender.svg"
worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]")
worker.utils.copy_file(source_file_path, dest_file_path)
@ -87,7 +95,8 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run)
worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run)
worker.utils.call(
["review-tools.snap-review", snap_package_file_path, "--allow-classic"], dry_run=dry_run
["review-tools.snap-review", snap_package_file_path, "--allow-classic"],
dry_run=dry_run,
)
if dry_run:
@@ -110,11 +119,14 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
branches_config = builder.get_branches_config()
is_lts = builder.track_id in branches_config.all_lts_tracks
is_latest = (
branches_config.track_major_minor_versions[builder.track_id] == version_info.short_version
branches_config.track_major_minor_versions[builder.track_id]
== version_info.short_version
)
# Never push to stable
snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace("alpha", "edge")
snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace(
"alpha", "edge"
)
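    # After the remapping above, "stable" should never remain; guard against it regardless.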
if snap_risk_id == "stable":
raise Exception("Delivery to [stable] channel not allowed")
@@ -139,7 +151,9 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
worker_config = builder.get_worker_config()
env = os.environ.copy()
env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(builder.service_env_id)
env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(
builder.service_env_id
)
    # If this fails, then the permissions were not set correctly with ACLs
worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env)

View file

@@ -9,7 +9,9 @@ import worker.deploy
import worker.utils
def _package(builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False) -> None:
def _package(
builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False
) -> None:
os.chdir(builder.code_path)
if needs_complete:
worker.utils.call(["make", "source_archive_complete"])

View file

@@ -13,7 +13,9 @@ import worker.utils
def extract_file(
builder: worker.deploy.CodeStoreBuilder, source_file_path: pathlib.Path, platform: str
builder: worker.deploy.CodeStoreBuilder,
source_file_path: pathlib.Path,
platform: str,
) -> None:
worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam")
if not source_file_path.exists():
@@ -33,7 +35,9 @@ def extract_file(
# Move any folder there as ./content
for source_content_path in dest_extract_path.iterdir():
if source_content_path.is_dir():
worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]")
worker.utils.info(
f"Move [{source_content_path.name}] -> [{dest_content_path}]"
)
worker.utils.move(source_content_path, dest_content_path)
break
@@ -55,8 +59,12 @@ def extract_file(
worker.utils.remove_file(image_file_path)
worker.utils.info(f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]")
worker.utils.move(source_content_path / "Blender.app", dest_content_path / "Blender.app")
worker.utils.info(
f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]"
)
worker.utils.move(
source_content_path / "Blender.app", dest_content_path / "Blender.app"
)
worker.utils.remove_dir(source_content_path)
elif platform == "windows":
worker.utils.info(f"Extracting zip file [{source_file_path}]")
@@ -66,7 +74,9 @@ def extract_file(
# Move any folder there as ./content
for source_content_path in dest_extract_path.iterdir():
if source_content_path.is_dir():
worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]")
worker.utils.info(
f"Move [{source_content_path.name}] -> [{dest_content_path}]"
)
worker.utils.move(source_content_path, dest_content_path)
break
else:
@@ -97,9 +107,10 @@ def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None:
version_info = worker.blender.version.VersionInfo(builder)
branches_config = builder.get_branches_config()
is_lts = builder.track_id in branches_config.all_lts_tracks
is_latest = branches_config.track_major_minor_versions["vdev"] == version_info.short_version
is_latest = (
branches_config.track_major_minor_versions["vdev"] == version_info.short_version
)
track_path = builder.track_path
log_path = builder.track_path / "log"
worker.utils.remove_dir(log_path)
os.makedirs(log_path, exist_ok=True)

View file

@@ -52,12 +52,16 @@ def _package_architecture(
input_file_path = builder.package_dir / build["file_name"]
break
if not input_file_path:
raise Exception(f"Windows package not found in [{builder.package_dir}] manifest")
raise Exception(
f"Windows package not found in [{builder.package_dir}] manifest"
)
# Copy all required files into working folder
source_path = builder.code_path / "release" / "windows" / "msix"
dest_path = builder.store_windows_dir
worker.utils.info(f"Copying [{source_path}] -> [{dest_path}] for windows store packaging")
worker.utils.info(
f"Copying [{source_path}] -> [{dest_path}] for windows store packaging"
)
for source_file in source_path.iterdir():
if source_file.name == "README.md":
@@ -104,7 +108,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None:
raise Exception("Can only run this on Windows, aborting")
branches_config = builder.get_branches_config()
expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
expected_platforms = branches_config.code_official_platform_architectures[
builder.track_id
]
for expected_platform in expected_platforms:
if expected_platform.startswith("windows"):

View file

@@ -35,7 +35,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None:
api_base_url = "https://docs.blender.org/api"
api_dump_index_url = f"{api_base_url}/api_dump_index.json"
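    # A browser-like User-Agent is sent because the default urllib agent may be rejected by the server.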
request = urllib.request.Request(api_dump_index_url, headers={"User-Agent": "Mozilla"})
request = urllib.request.Request(
api_dump_index_url, headers={"User-Agent": "Mozilla"}
)
response = urllib.request.urlopen(request, timeout=5.0)
api_dump_index_text = response.read().decode("utf-8", "ignore")
@@ -48,7 +50,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None:
api_dump_url = f"{api_base_url}/{version}/api_dump.json"
worker.utils.info(f"Download {api_dump_url}")
request = urllib.request.Request(api_dump_url, headers={"User-Agent": "Mozilla"})
request = urllib.request.Request(
api_dump_url, headers={"User-Agent": "Mozilla"}
)
response = urllib.request.urlopen(request, timeout=5.0)
api_dump_text = response.read().decode("utf-8", "ignore")
@@ -97,7 +101,10 @@ def compile_doc(builder: DocApiBuilder) -> None:
dest_path = api_dump_build_path
worker.utils.rsync(
source_path, dest_path, include_paths=api_dump_include_paths, exclude_paths=["*"]
source_path,
dest_path,
include_paths=api_dump_include_paths,
exclude_paths=["*"],
)
version = worker.blender.version.VersionInfo(builder).short_version
@@ -125,7 +132,9 @@ def compile_doc(builder: DocApiBuilder) -> None:
in_path = builder.build_doc_path / "sphinx-in"
out_path = builder.build_doc_path / "sphinx-out-html"
worker.utils.call(["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path])
worker.utils.call(
["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path]
)
def package(builder: DocApiBuilder) -> None:
@@ -177,7 +186,9 @@ def deliver(builder: DocApiBuilder) -> None:
# Put API dumps data on the server.
api_dump_build_path = f"{builder.build_doc_path}/api_dump/"
api_dump_dest_path = f"{connect_id}:{remote_path}/"
worker.utils.rsync(api_dump_build_path, api_dump_dest_path, change_modes=change_modes)
worker.utils.rsync(
api_dump_build_path, api_dump_dest_path, change_modes=change_modes
)
# Sync zip package
if builder.needs_package_delivery:
@@ -189,7 +200,10 @@ def deliver(builder: DocApiBuilder) -> None:
source_file_path = builder.build_doc_path / package_file_name
dest_file_path = f"{connect_id}:{version_remote_path}/{package_file_name}"
worker.utils.rsync(
source_file_path, dest_file_path, exclude_paths=[".doctrees"], change_modes=change_modes
source_file_path,
dest_file_path,
exclude_paths=[".doctrees"],
change_modes=change_modes,
)
# Create links
@@ -198,16 +212,19 @@ def deliver(builder: DocApiBuilder) -> None:
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"]
)
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "master"]
connect_id,
["ln", "-svF", remote_path / dev_version, remote_path / "master"],
)
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"]
)
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"]
connect_id,
["ln", "-svF", remote_path / latest_version, remote_path / "latest"],
)
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "current"]
connect_id,
["ln", "-svF", remote_path / latest_version, remote_path / "current"],
)

View file

@@ -29,7 +29,9 @@ def update(builder: DocDeveloperBuilder) -> None:
def compile_doc(builder: DocDeveloperBuilder) -> None:
os.chdir(builder.track_path)
worker.utils.call_pipenv(["install", "--requirements", builder.code_path / "requirements.txt"])
worker.utils.call_pipenv(
["install", "--requirements", builder.code_path / "requirements.txt"]
)
worker.utils.remove_dir(builder.output_path)
@@ -48,7 +50,9 @@ def deliver(builder: DocDeveloperBuilder) -> None:
remote_path = f"developer.blender.org/webroot/{builder.service_env_id}/docs"
connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}"
server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path(remote_path)
server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path(
remote_path
)
change_modes = ["D0755", "F0644"]
source_path = f"{builder.output_path}/"

View file

@@ -35,7 +35,9 @@ class ManualBuilder(worker.utils.Builder):
if self.needs_all_locales:
locale_path = self.code_path / "locale"
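            # Collect every locale directory name, skipping hidden entries (names starting with ".").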
locales += [
item.name for item in locale_path.iterdir() if not item.name.startswith(".")
item.name
for item in locale_path.iterdir()
if not item.name.startswith(".")
]
return locales
@@ -50,7 +52,9 @@ def update(builder: ManualBuilder) -> None:
def check(builder: ManualBuilder) -> None:
os.chdir(builder.track_path)
worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"])
worker.utils.call_pipenv(
["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]
)
os.chdir(builder.code_path)
@@ -63,7 +67,9 @@ def check(builder: ManualBuilder) -> None:
def compile_doc(builder: ManualBuilder) -> None:
# Install requirements.
os.chdir(builder.track_path)
worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"])
worker.utils.call_pipenv(
["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]
)
# Determine format and locales
locales = builder.get_locales()
@@ -127,8 +133,12 @@ def compile_doc(builder: ManualBuilder) -> None:
# Hack appropriate versions.json URL into version_switch.js
worker.utils.info("Replacing URL in version_switch.js")
version_switch_file_path = build_output_path / "_static" / "js" / "version_switch.js"
versions_file_url = f"https://docs.blender.org/{builder.service_env_id}/versions.json"
version_switch_file_path = (
build_output_path / "_static" / "js" / "version_switch.js"
)
versions_file_url = (
f"https://docs.blender.org/{builder.service_env_id}/versions.json"
)
version_switch_text = version_switch_file_path.read_text()
version_switch_text = version_switch_text.replace(
@@ -229,17 +239,24 @@ def deliver(builder: ManualBuilder) -> None:
dest_path,
exclude_paths=[".doctrees", "blender_manual_*.zip"],
delete=True,
delete_path_check=str(version_remote_path)
delete_path_check=str(version_remote_path),
)
# Create links
if builder.track_id == "vdev":
worker.utils.info(f"Creating links for {locale}")
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"]
connect_id,
["ln", "-svF", remote_path / dev_version, remote_path / "dev"],
)
worker.utils.call_ssh(
connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"]
connect_id,
[
"ln",
"-svF",
remote_path / latest_version,
remote_path / "latest",
],
)
if builder.needs_package_delivery:
@@ -281,7 +298,11 @@ if __name__ == "__main__":
parser.add_argument("--needs-all-locales", action="store_true", required=False)
parser.add_argument("--needs-package-delivery", action="store_true", required=False)
parser.add_argument(
"--doc-format", default="html", type=str, required=False, choices=["html", "epub"]
"--doc-format",
default="html",
type=str,
required=False,
choices=["html", "epub"],
)
args = parser.parse_args()

View file

@@ -44,7 +44,9 @@ def deliver(builder: worker.utils.Builder) -> None:
change_modes = ["D0755", "F0644"]
if builder.service_env_id == "LOCAL" and builder.platform == "darwin":
worker.utils.warning("rsync change_owner not supported on darwin, ignoring for LOCAL")
worker.utils.warning(
"rsync change_owner not supported on darwin, ignoring for LOCAL"
)
change_owner = None
else:
change_owner = "buildbot:www-data"

View file

@@ -102,7 +102,7 @@ def _log_cmd(msg: str) -> None:
_warnings += [msg]
return
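    # Encode with ASCII replacement characters so printing cannot fail on restricted output encodings.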
print(msg.encode('ascii', errors='replace').decode('ascii'), flush=True)
print(msg.encode("ascii", errors="replace").decode("ascii"), flush=True)
# Command execution
@@ -118,7 +118,9 @@ CmdFilterOutput = Optional[Callable[[str], Optional[str]]]
CmdEnvironment = Optional[Dict[str, str]]
def _prepare_call(cmd: CmdSequence, dry_run: bool = False) -> Sequence[Union[str, pathlib.Path]]:
def _prepare_call(
cmd: CmdSequence, dry_run: bool = False
) -> Sequence[Union[str, pathlib.Path]]:
real_cmd: List[Union[str, pathlib.Path]] = []
log_cmd: List[str] = []
@@ -174,9 +176,9 @@ def call(
if line:
line_str = line.strip("\n\r")
if filter_output:
line_str_filter = filter_output(line_str)
filter_output(line_str)
else:
line_str_filter = line_str
pass
if line_str:
_log_cmd(line_str)
else:
@@ -206,7 +208,9 @@ def check_output(cmd: CmdSequence, exit_on_error: bool = True) -> str:
sys.stderr.flush()
try:
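        # stderr is folded into stdout so error output is part of the captured text.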
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
output = subprocess.check_output(
cmd, stderr=subprocess.STDOUT, universal_newlines=True
)
except subprocess.CalledProcessError as e:
if exit_on_error:
sys.exit(e.returncode)
@@ -255,9 +259,8 @@ def rsync(
if str(dest_path).find(delete_path_check) == -1:
raise Exception("Rsync: remote path must contain '{delete_path_check}'")
info_options = "progress0,flist0,name0,stats2"
if show_names:
info_options = "progress0,flist0,name1,stats2"
pass
cmd: List[Union[str, pathlib.Path, HiddenArgument]] = [
"rsync",
@@ -294,20 +297,27 @@ def move(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False)
shutil.move(str(path_from), path_to)
def copy_dir(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None:
def copy_dir(
path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False
) -> None:
if dry_run:
return
shutil.copytree(path_from, path_to)
def copy_file(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None:
def copy_file(
path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False
) -> None:
if dry_run:
return
shutil.copy2(path_from, path_to)
def remove_file(
path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False
path: pathlib.Path,
retry_count: int = 3,
retry_wait_time: float = 5.0,
dry_run: bool = False,
) -> None:
if not path.exists():
return
@@ -316,52 +326,54 @@ def remove_file(
return
info(f"Removing {path}")
for try_count in range(0, retry_count):
try:
for try_count in range(retry_count):
try:
if path.exists():
path.unlink()
except FileNotFoundError:
pass
return
except:
except FileNotFoundError:
# File was already removed by another process.
return
except PermissionError as e:
warning(f"Permission error when removing {path}: {e}")
time.sleep(retry_wait_time)
except OSError as e:
warning(f"OS error when removing {path}: {e}")
time.sleep(retry_wait_time)
# Not using missing_ok yet for Python3.6 compatibility.
# Final attempt outside the retry loop
try:
if path.exists():
path.unlink()
except FileNotFoundError:
pass
except PermissionError as e:
error(f"Failed to remove {path} due to permission issues: {e}")
except OSError as e:
error(f"Failed to remove {path} after retries due to OS error: {e}")
# Retry several times by default, giving it a chance for possible antivirus to release
# a lock on files in the build folder. Happened for example with MSI files on Windows.
def remove_dir(
path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False
    path: pathlib.Path,
    retry_count: int = 3,
    retry_wait_time: float = 5.0,
    dry_run: bool = False,
) -> None:
if not path.exists():
return
if dry_run:
info(f"Removing {path} (dry run)")
return
info(f"Removing {path}")
for try_count in range(0, retry_count):
for try_count in range(retry_count):
try:
if path.exists():
shutil.rmtree(path)
return
except:
if platform.system().lower() == "windwos":
# XXX: Windows builder debug.
# Often the `build_package` is failed to be removed because
# of the "Access Denied" error on blender-windows64.msi.
# Run some investigation commands to see what is going on.
return # Successfully removed, no need to retry
except PermissionError as e:
if platform.system().lower() == "windows":
# Debugging access denied errors on Windows
if path.name == "build_package":
info("Removal of package artifacts folder failed. Investigating...")
msi_path = (
path / "_CPack_Packages" / "Windows" / "WIX" / "blender-windows64.msi"
path
/ "_CPack_Packages"
/ "Windows"
/ "WIX"
/ "blender-windows64.msi"
)
if msi_path.exists():
info(f"Information about [{msi_path}]")
@@ -376,11 +388,23 @@ def remove_dir(
)
else:
info(f"MSI package file [{msi_path}] does not exist")
warning(f"Permission error when removing {path}: {e}")
time.sleep(retry_wait_time)
except FileNotFoundError:
# The directory is already gone; no action needed.
return
except OSError as e:
warning(f"OS error when attempting to remove {path}: {e}")
time.sleep(retry_wait_time)
# Final attempt outside of retries
if path.exists():
try:
shutil.rmtree(path)
except PermissionError as e:
error(f"Failed to remove {path} due to permission issues: {e}")
except OSError as e:
error(f"Failed to remove {path} after retries due to OS error: {e}")
def is_tool(name: Union[str, pathlib.Path]) -> bool:
@@ -409,7 +433,9 @@ def update_source(
warning("Removing git lock, probably left behind by killed git process")
remove_file(index_lock_path)
for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"):
warning("Removing submodule git lock, probably left behind by killed git process")
warning(
"Removing submodule git lock, probably left behind by killed git process"
)
remove_file(index_lock_path)
os.chdir(code_path)
@@ -438,7 +464,15 @@ def update_source(
# Checkout pull request into PR123 branch.
call(["git", "checkout", "main"])
call(["git", "fetch", "-f", "origin", f"pull/{pull_request_id}/head:{branch_name}"])
call(
[
"git",
"fetch",
"-f",
"origin",
f"pull/{pull_request_id}/head:{branch_name}",
]
)
call(["git", "checkout", branch_name])
if commit_id and (commit_id != "HEAD"):