From 5b06c9273138cdf12328baa1c1e2a3fa3354137b Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 12:36:08 +0100 Subject: [PATCH 01/13] Set default config URL --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 0e43bc6..87d0094 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: environment: - 'HOSTNAME=${MASTER_HOSTNAME:-buildbot-master-1}' - 'BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config}' - - 'BUILDBOT_CONFIG_URL=${BUILDBOT_CONFIG_URL:-https://github.com/buildbot/buildbot-docker-example-config/archive/master.tar.gz}' + - 'BUILDBOT_CONFIG_URL=${BUILDBOT_CONFIG_URL:-https://git.braak.pro/api/packages/bartvdbraak/generic/builder.braak.pro/main/config.tar.gz}' - 'BUILDBOT_WORKER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - 'BUILDBOT_WEB_URL=${BUILDBOT_WEB_URL:-http://localhost:8010/}' - 'BUILDBOT_WEB_PORT=${BUILDBOT_WEB_PORT:-tcp:port=8010}' -- 2.45.2 From 1c908c4b825ab9b4a268cfb80fb3d9315568e4bd Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 16:51:43 +0100 Subject: [PATCH 02/13] Add Make and Pipelines for checks --- .env.example | 3 + .forgejo/workflows/check.yml | 14 ++ .../workflows/{publish.yml => release.yml} | 10 +- .gitignore | 2 + Makefile | 16 ++ config/master.cfg | 117 +----------- config/setup.py | 169 ++++++++++++++++++ docker-compose.override.yml | 5 + mypy.ini | 3 + requirements.txt | 2 + 10 files changed, 226 insertions(+), 115 deletions(-) create mode 100644 .env.example create mode 100644 .forgejo/workflows/check.yml rename .forgejo/workflows/{publish.yml => release.yml} (61%) create mode 100644 .gitignore create mode 100644 Makefile create mode 100644 config/setup.py create mode 100644 docker-compose.override.yml create mode 100644 mypy.ini create mode 100644 requirements.txt diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..fd89b15 --- /dev/null +++ b/.env.example @@ -0,0 +1,3 @@ +SERVICE_USER_POSTGRESQL=buildbot +SERVICE_PASSWORD_POSTGRESQL=changeme! 
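+# Optional: override the config archive URL baked into docker-compose.yml, e.g.
+# BUILDBOT_CONFIG_URL='https://git.braak.pro/api/packages/bartvdbraak/generic/builder.braak.pro/main/config.tar.gz'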
+BUILDBOT_CONFIG_URL=''
\ No newline at end of file
diff --git a/.forgejo/workflows/check.yml b/.forgejo/workflows/check.yml
new file mode 100644
index 0000000..8f68941
--- /dev/null
+++ b/.forgejo/workflows/check.yml
@@ -0,0 +1,14 @@
+name: Run checks
+on: [ push, pull_request ]
+jobs:
+  checks:
+    runs-on: docker
+    env:
+      DIRECTORY: config
+    steps:
+      - uses: actions/checkout@v4
+      - run: cd ${{ env.DIRECTORY }}
+      - run: pip install -r requirements-dev.txt
+      - run: ruff check
+      - run: ruff format
+      - run: myp
\ No newline at end of file
diff --git a/.forgejo/workflows/publish.yml b/.forgejo/workflows/release.yml
similarity index 61%
rename from .forgejo/workflows/publish.yml
rename to .forgejo/workflows/release.yml
index 1e2c061..68a5349 100644
--- a/.forgejo/workflows/publish.yml
+++ b/.forgejo/workflows/release.yml
@@ -1,3 +1,4 @@
+name: Release and deploy
 on:
   push:
     branches:
@@ -6,18 +7,19 @@ jobs:
   artifact:
     runs-on: docker
     env:
-      ARCHIVE_FOLDER: config
+      DIRECTORY: config
+      PACKAGE_NAME: builder.braak.pro
     steps:
       - name: Checkout
        uses: actions/checkout@v4
      - name: Archive
-        run: "tar -czvf ${{ env.ARCHIVE_FOLDER }}.tar.gz ${{ env.ARCHIVE_FOLDER }}"
+        run: "tar -czvf ${{ env.DIRECTORY }}.tar.gz ${{ env.DIRECTORY }}"
      - name: Upload
        run: |
          curl \
            --user "${{ env.GITHUB_REPOSITORY_OWNER }}:${{ secrets.ACCESS_TOKEN }}" \
-            --upload-file "${{ env.ARCHIVE_FOLDER }}.tar.gz" \
-            "${{ env.GITHUB_SERVER_URL }}/api/packages/${{ env.GITHUB_REPOSITORY_OWNER }}/generic/builder.braak.pro/${{ env.GITHUB_REF_NAME }}/${{ env.ARCHIVE_FOLDER }}.tar.gz"
+            --upload-file "${{ env.DIRECTORY }}.tar.gz" \
+            "${{ env.GITHUB_SERVER_URL }}/api/packages/${{ env.GITHUB_REPOSITORY_OWNER }}/generic/${{ env.PACKAGE_NAME }}/${{ env.GITHUB_REF_NAME }}/${{ env.DIRECTORY }}.tar.gz"
      - name: Deploy
        run: |
          curl -X "POST" \
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c2eabec
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.venv
+.env
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..cc95b6a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,16 @@
+
+.PHONY: help setup check
+help: ## Display this help message
+	@echo "Usage:"
+	@echo "  make <target>"
+	@echo ""
+	@echo "Targets:"
+	@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "  %-20s %s\n", $$1, $$2}' $(MAKEFILE_LIST)
+
+setup: ## Create Python virtualenv and install dependencies
+	@if [ ! -f .env ]; then cp .env.example .env; fi
+	@if [ ! -d .venv ]; then python3 -m venv .venv; fi
+	@if [ -d .venv ]; then . .venv/bin/activate && pip3 install -r requirements.txt; fi
+
+check: ## Run linter, formatter and typechecks
+	ruff check && ruff format && mypy config/master.cfg
\ No newline at end of file
diff --git a/config/master.cfg b/config/master.cfg
index a11d998..8d4475a 100644
--- a/config/master.cfg
+++ b/config/master.cfg
@@ -1,116 +1,11 @@
 # -*- python -*-
 # ex: set filetype=python:
-
+import importlib
 import os
+import sys
 
-from buildbot.plugins import *
+sys.path.insert(0, os.path.expanduser("~/git/blender-devops/buildbot"))
 
-# This is a sample buildmaster config file. It must be installed as
-# 'master.cfg' in your buildmaster's base directory.
-
-# This is the dictionary that the buildmaster pays attention to. We also use
-# a shorter alias to save typing.
-c = BuildmasterConfig = {}
-
-####### WORKERS
-
-# The 'workers' list defines the set of recognized workers. Each element is
-# a Worker object, specifying a unique worker name and password.
The same -# worker name and password must be configured on the worker. - -c['workers'] = [worker.Worker("example-worker", 'pass')] - -if 'BUILDBOT_MQ_URL' in os.environ: - c['mq'] = { - 'type' : 'wamp', - 'router_url': os.environ['BUILDBOT_MQ_URL'], - 'realm': os.environ.get('BUILDBOT_MQ_REALM', 'buildbot').decode('utf-8'), - 'debug' : 'BUILDBOT_MQ_DEBUG' in os.environ, - 'debug_websockets' : 'BUILDBOT_MQ_DEBUG' in os.environ, - 'debug_lowlevel' : 'BUILDBOT_MQ_DEBUG' in os.environ, - } -# 'protocols' contains information about protocols which master will use for -# communicating with workers. You must define at least 'port' option that workers -# could connect to your master with this protocol. -# 'port' must match the value configured into the workers (with their -# --master option) -c['protocols'] = {'pb': {'port': os.environ.get("BUILDBOT_WORKER_PORT", 9989)}} - -####### CHANGESOURCES - -# the 'change_source' setting tells the buildmaster how it should find out -# about source code changes. Here we point to the buildbot clone of pyflakes. - -c['change_source'] = [] -c['change_source'].append(changes.GitPoller( - 'git://github.com/buildbot/pyflakes.git', - workdir='gitpoller-workdir', branch='master', - pollInterval=300)) - -####### SCHEDULERS - -# Configure the Schedulers, which decide how to react to incoming changes. In this -# case, just kick off a 'runtests' build - -c['schedulers'] = [] -c['schedulers'].append(schedulers.SingleBranchScheduler( - name="all", - change_filter=util.ChangeFilter(branch='master'), - treeStableTimer=None, - builderNames=["runtests"])) -c['schedulers'].append(schedulers.ForceScheduler( - name="force", - builderNames=["runtests"])) - -####### BUILDERS - -# The 'builders' list defines the Builders, which tell Buildbot how to perform a build: -# what steps, and which workers can execute them. Note that any particular build will -# only take place on one worker. - -factory = util.BuildFactory() -# check out the source -factory.addStep(steps.Git(repourl='http://github.com/buildbot/pyflakes.git', mode='incremental')) -# run the tests (note that this will require that 'trial' is installed) -factory.addStep(steps.ShellCommand(command=["trial", "pyflakes"])) - -c['builders'] = [] -c['builders'].append( - util.BuilderConfig(name="runtests", - workernames=["example-worker"], - factory=factory)) - -####### REPORTER TARGETS - -# 'services' is a list of Reporter Targets. The results of each build will be -# pushed to these targets. buildbot/reporters/*.py has a variety to choose from, -# like IRC bots. - -c['services'] = [] - -####### PROJECT IDENTITY - -# the 'title' string will appear at the top of this buildbot installation's -# home pages (linked to the 'titleURL'). - -c['title'] = "Blender" -c['titleURL'] = "https://blender.org/download" - -# the 'buildbotURL' string should point to the location where the buildbot's -# internal web server is visible. This typically uses the port number set in -# the 'www' entry below, but with an externally-visible host name which the -# buildbot cannot figure out without some help. - -c['buildbotURL'] = os.environ.get("BUILDBOT_WEB_URL", "http://localhost:8010/") - -# minimalistic config to activate new web UI -c['www'] = dict(port=os.environ.get("BUILDBOT_WEB_PORT", 8010), - plugins=dict(waterfall_view={}, console_view={})) - -####### DB URL - -c['db'] = { - # This specifies what database buildbot uses to store its state. You can leave - # this at its default for all but the largest installations. 
- 'db_url' : os.environ.get("BUILDBOT_DB_URL", "sqlite://").format(**os.environ), -} \ No newline at end of file +import setup +importlib.reload(setup) +BuildmasterConfig = setup.setup() diff --git a/config/setup.py b/config/setup.py new file mode 100644 index 0000000..a4fdb9e --- /dev/null +++ b/config/setup.py @@ -0,0 +1,169 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import importlib +import os +import sys +import re +import pathlib + +import buildbot.plugins + +from typing import Any, Dict, List + +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +import conf.auth +import conf.branches +import conf.machines +import conf.worker + +import gitea.blender + +import pipeline + +# We need to do this when we reload (SIGHUP) the buildbot server process. +importlib.reload(conf.auth) +importlib.reload(conf.branches) +importlib.reload(conf.machines) +importlib.reload(conf.worker) +importlib.reload(gitea.blender) +importlib.reload(pipeline) + +devops_env_id = os.environ.get("DEVOPS_ENV_ID", default="LOCAL") +devops_host_id = os.environ.get("DEVOPS_HOST_ID", default="localhost") + + +def setup() -> Dict[str, Any]: + ####### MAIN CONFIGURATION + c = {} + + # Change Source + c["change_source"] = pipeline.change_sources() + + # Workers + print("*** Creating platform workers") + platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + workers: List[buildbot.plugins.worker.Worker] = [] + configured_worker_names = set() + for worker_names in platform_worker_names.values(): + for worker_name in worker_names: + if worker_name in configured_worker_names: + print(f"Skipping {worker_name}, already configured") + continue + configured_worker_names.add(worker_name) + workers += [ + buildbot.plugins.worker.Worker( + worker_name, + conf.machines.get_worker_password(devops_env_id, worker_name), + max_builds=1, + keepalive_interval=3600, + ) + ] + + print("*** Creating local workers") + local_worker_names = conf.machines.fetch_local_worker_names() + for worker_name in local_worker_names: + workers += [buildbot.plugins.worker.LocalWorker(worker_name)] + + c["workers"] = workers + + # Builders and Schedulers + builders, schedulers = pipeline.populate(devops_env_id) + c["builders"] = builders + c["schedulers"] = schedulers + + ####### BUILDBOT SERVICES + + # 'services' is a list of BuildbotService items like reporter targets. The + # status of each build will be pushed to these targets. buildbot/reporters/*.py + # has a variety to choose from, like IRC bots. + + gitea_status_service = gitea.blender.setup_service(devops_env_id) + if gitea_status_service: + c["services"] = [gitea_status_service] + else: + c["services"] = [] + + ####### PROJECT IDENTITY + + # the 'title' string will appear at the top of this buildbot installation's + # home pages (linked to the 'titleURL'). + + c["title"] = f"Bot - {devops_env_id}" + c["titleURL"] = "https://projects.blender.org" + + # the 'buildbotURL' string should point to the location where the buildbot's + # internal web server is visible. This typically uses the port number set in + # the 'www' entry below, but with an externally-visible host name which the + # buildbot cannot figure out without some help. 
+ c["buildbotURL"] = f"http://{devops_host_id}:8010/" + + if devops_env_id != "LOCAL": + c["buildbotURL"] = f"http://{devops_host_id}:8000/admin/" + + if devops_env_id == "PROD": + c["buildbotURL"] = "https://builder.blender.org/admin/" + if devops_env_id == "UATEST": + c["buildbotURL"] = "https://builder.uatest.blender.org/admin/" + + # Minimalistic config to activate new web UI + c["www"] = dict( + port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={}) + ) + + # Database + if devops_env_id == "LOCAL": + c["db"] = {"db_url": "sqlite:///state.sqlite"} + else: + # PostgreSQL database, as recommended for production environment. + c["db"] = {"db_url": "postgresql://buildbot@127.0.0.1/buildbot"} + + c["buildbotNetUsageData"] = None + + # Authentication + c["www"]["auth"] = conf.auth.fetch_authentication(devops_env_id) + + # Authorization + c["www"]["authz"] = conf.auth.fetch_authorization(devops_env_id) + + # Disable UI - does not work + c["www"]["plugins"] = { + "waterfall_view": False, + "console_view": False, + "grid_view": False, + } + + # UI Defaults + c["www"]["ui_default_config"] = { + "Grid.fullChanges": True, + "Grid.leftToRight": True, + "Grid.revisionLimit": 10, + "Builders.buildFetchLimit": 400, + "LogPreview.loadlines": 100, + "LogPreview.maxlines": 100, + "ChangeBuilds.buildsFetchLimit": 10, + } + + # Validation + c["validation"] = { + "branch": re.compile(r"^[\w.+/~-]*$"), + "revision": re.compile(r"^[ \w\.\-\/]*$"), + "property_name": re.compile(r"^[\w\.\-\/\~:]*$"), + "property_value": re.compile(r"^[\w\.\-\/\~:]*$"), + } + + # Rev link + c["revlink"] = buildbot.plugins.util.RevlinkMatch( + [r"https://projects.blender.org/([^/]*)/([^/]*?)(?:\.git)?$"], + r"https://projects.blender.org/\1/\2/commit/%s", + ) + + # Port for workers to connectto + c["protocols"] = {"pb": {"port": 9989}} + + # Disable collapsing requests + c["collapseRequests"] = False + + return c diff --git a/docker-compose.override.yml b/docker-compose.override.yml new file mode 100644 index 0000000..cae2114 --- /dev/null +++ b/docker-compose.override.yml @@ -0,0 +1,5 @@ +services: + buildbot-master: + env_file: .env + volumes: + - ./config:/buildbot/config \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..f7d3a54 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +warn_unused_configs = True +ignore_missing_imports = True \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e3b95ac --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +ruff +mypy \ No newline at end of file -- 2.45.2 From da6339d74d1744131f35f9bb0d530822730b16b3 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 20:04:14 +0100 Subject: [PATCH 03/13] Fix healthchecks and volumes --- docker-compose.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 87d0094..3db7be5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,7 +18,7 @@ services: - CMD - curl - '-f' - - 'http://127.0.0.1:8010' + - 'http://localhost:$${BUILDBOT_WEB_PORT}' interval: 2s timeout: 10s retries: 15 @@ -50,3 +50,14 @@ services: - 'WORKERNAME=${WORKERNAME:-example-worker}' - 'WORKERPASS=${WORKERPASS:-pass}' - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' + healthcheck: + test: + - CMD + - curl + - '-f' + - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' + 
interval: 5s + timeout: 20s + retries: 10 +volumes: + buildbot-db: {} \ No newline at end of file -- 2.45.2 From 18e653fd2e4cfa4965b392c5d61749e58a416e01 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 20:04:36 +0100 Subject: [PATCH 04/13] Setup python and make --- .forgejo/workflows/check.yml | 17 +++++++++++------ Dockerfile | 0 Makefile | 10 ++++++++-- config/master.cfg | 3 --- config/setup.py | 2 +- 5 files changed, 20 insertions(+), 12 deletions(-) create mode 100644 Dockerfile diff --git a/.forgejo/workflows/check.yml b/.forgejo/workflows/check.yml index 8f68941..9d564eb 100644 --- a/.forgejo/workflows/check.yml +++ b/.forgejo/workflows/check.yml @@ -1,14 +1,19 @@ name: Run checks -on: [ push, pull_request ] +on: + pull_request: + branches: + - main jobs: checks: runs-on: docker + container: + image: ghcr.io/catthehacker/ubuntu:act-22.04 env: DIRECTORY: config steps: - uses: actions/checkout@v4 - - run: cd ${{ env.DIRECTORY }} - - run: pip install -r requirements-dev.txt - - run: ruff check - - run: ruff format - - run: myp \ No newline at end of file + - uses: actions/setup-python@v5 + with: + python-version: '3.13' + - run: pip install -r requirements.txt + - run: make check \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e69de29 diff --git a/Makefile b/Makefile index cc95b6a..4d8df3d 100644 --- a/Makefile +++ b/Makefile @@ -12,5 +12,11 @@ setup: ## Create Python virtualenv and install dependencies @if [ ! -d .venv ]; then python3 -m venv .venv; fi @if [ -d .venv ]; then . .venv/bin/activate && pip3 install -r requirements.txt; fi -check: ## Run linter, formatter and typechecks - ruff check && ruff format && mypy config/master.cfg \ No newline at end of file +check: ## Check linting, formatting and types + ruff check + ruff format --check + mypy config/master.cfg + +format: ## Autofix linting and formatting issues + ruff check --fix + ruff format \ No newline at end of file diff --git a/config/master.cfg b/config/master.cfg index 8d4475a..b1e1d40 100644 --- a/config/master.cfg +++ b/config/master.cfg @@ -3,9 +3,6 @@ import importlib import os import sys - -sys.path.insert(0, os.path.expanduser("~/git/blender-devops/buildbot")) - import setup importlib.reload(setup) BuildmasterConfig = setup.setup() diff --git a/config/setup.py b/config/setup.py index a4fdb9e..661c973 100644 --- a/config/setup.py +++ b/config/setup.py @@ -36,7 +36,7 @@ devops_host_id = os.environ.get("DEVOPS_HOST_ID", default="localhost") def setup() -> Dict[str, Any]: - ####### MAIN CONFIGURATION + ####### MAIN - configuration c = {} # Change Source -- 2.45.2 From 0a1454d25024791338ef018e7a54268a2944b69a Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 21:41:39 +0100 Subject: [PATCH 05/13] Add back further changes from blender-devops --- .forgejo/workflows/check.yml | 1 - config/conf/__init__.py | 0 config/conf/auth.py | 106 +++ config/conf/branches.py | 106 +++ config/conf/local/__init__.py | 0 config/conf/local/auth.py | 28 + config/conf/local/machines.py | 31 + config/conf/local/worker.py | 87 +++ config/conf/machines.py | 39 ++ config/conf/worker.py | 37 ++ config/gitea/LICENSE | 21 + config/gitea/README.md | 4 + config/gitea/__init__.py | 0 config/gitea/blender.py | 62 ++ config/gitea/reporter.py | 279 ++++++++ config/pipeline/__init__.py | 101 +++ config/pipeline/code.py | 748 ++++++++++++++++++++++ config/pipeline/code_benchmark.py | 94 +++ config/pipeline/code_bpy_deploy.py | 30 + 
config/pipeline/code_deploy.py | 43 ++ config/pipeline/code_store.py | 235 +++++++ config/pipeline/common.py | 335 ++++++++++ config/pipeline/doc_api.py | 54 ++ config/pipeline/doc_developer.py | 32 + config/pipeline/doc_manual.py | 44 ++ config/pipeline/doc_studio.py | 32 + config/worker/__init__.py | 0 config/worker/archive.py | 346 ++++++++++ config/worker/blender/__init__.py | 185 ++++++ config/worker/blender/benchmark.py | 125 ++++ config/worker/blender/blender.applescript | 25 + config/worker/blender/bundle_dmg.py | 473 ++++++++++++++ config/worker/blender/compile.py | 534 +++++++++++++++ config/worker/blender/cpack_post.cmake | 34 + config/worker/blender/cpack_post.py | 30 + config/worker/blender/lint.py | 45 ++ config/worker/blender/msix_package.py | 114 ++++ config/worker/blender/pack.py | 357 +++++++++++ config/worker/blender/sign.py | 195 ++++++ config/worker/blender/test.py | 60 ++ config/worker/blender/update.py | 53 ++ config/worker/blender/version.py | 52 ++ config/worker/code.py | 42 ++ config/worker/code_benchmark.py | 43 ++ config/worker/code_bpy_deploy.py | 35 + config/worker/code_deploy.py | 40 ++ config/worker/code_store.py | 59 ++ config/worker/configure.py | 199 ++++++ config/worker/deploy/__init__.py | 41 ++ config/worker/deploy/artifacts.py | 251 ++++++++ config/worker/deploy/monitor.py | 110 ++++ config/worker/deploy/pypi.py | 103 +++ config/worker/deploy/snap.py | 161 +++++ config/worker/deploy/source.py | 38 ++ config/worker/deploy/steam.py | 260 ++++++++ config/worker/deploy/windows.py | 116 ++++ config/worker/doc_api.py | 230 +++++++ config/worker/doc_developer.py | 79 +++ config/worker/doc_manual.py | 289 +++++++++ config/worker/doc_studio.py | 96 +++ config/worker/utils.py | 549 ++++++++++++++++ 61 files changed, 7917 insertions(+), 1 deletion(-) create mode 100644 config/conf/__init__.py create mode 100644 config/conf/auth.py create mode 100644 config/conf/branches.py create mode 100644 config/conf/local/__init__.py create mode 100644 config/conf/local/auth.py create mode 100644 config/conf/local/machines.py create mode 100644 config/conf/local/worker.py create mode 100644 config/conf/machines.py create mode 100644 config/conf/worker.py create mode 100644 config/gitea/LICENSE create mode 100644 config/gitea/README.md create mode 100644 config/gitea/__init__.py create mode 100644 config/gitea/blender.py create mode 100644 config/gitea/reporter.py create mode 100644 config/pipeline/__init__.py create mode 100644 config/pipeline/code.py create mode 100644 config/pipeline/code_benchmark.py create mode 100644 config/pipeline/code_bpy_deploy.py create mode 100644 config/pipeline/code_deploy.py create mode 100644 config/pipeline/code_store.py create mode 100644 config/pipeline/common.py create mode 100644 config/pipeline/doc_api.py create mode 100644 config/pipeline/doc_developer.py create mode 100644 config/pipeline/doc_manual.py create mode 100644 config/pipeline/doc_studio.py create mode 100644 config/worker/__init__.py create mode 100755 config/worker/archive.py create mode 100644 config/worker/blender/__init__.py create mode 100644 config/worker/blender/benchmark.py create mode 100644 config/worker/blender/blender.applescript create mode 100644 config/worker/blender/bundle_dmg.py create mode 100644 config/worker/blender/compile.py create mode 100644 config/worker/blender/cpack_post.cmake create mode 100644 config/worker/blender/cpack_post.py create mode 100644 config/worker/blender/lint.py create mode 100644 config/worker/blender/msix_package.py create mode 
100644 config/worker/blender/pack.py create mode 100644 config/worker/blender/sign.py create mode 100644 config/worker/blender/test.py create mode 100644 config/worker/blender/update.py create mode 100644 config/worker/blender/version.py create mode 100755 config/worker/code.py create mode 100755 config/worker/code_benchmark.py create mode 100755 config/worker/code_bpy_deploy.py create mode 100755 config/worker/code_deploy.py create mode 100755 config/worker/code_store.py create mode 100644 config/worker/configure.py create mode 100644 config/worker/deploy/__init__.py create mode 100644 config/worker/deploy/artifacts.py create mode 100644 config/worker/deploy/monitor.py create mode 100644 config/worker/deploy/pypi.py create mode 100644 config/worker/deploy/snap.py create mode 100644 config/worker/deploy/source.py create mode 100644 config/worker/deploy/steam.py create mode 100644 config/worker/deploy/windows.py create mode 100755 config/worker/doc_api.py create mode 100755 config/worker/doc_developer.py create mode 100755 config/worker/doc_manual.py create mode 100755 config/worker/doc_studio.py create mode 100644 config/worker/utils.py diff --git a/.forgejo/workflows/check.yml b/.forgejo/workflows/check.yml index 9d564eb..3cd9af4 100644 --- a/.forgejo/workflows/check.yml +++ b/.forgejo/workflows/check.yml @@ -1,4 +1,3 @@ -name: Run checks on: pull_request: branches: diff --git a/config/conf/__init__.py b/config/conf/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/conf/auth.py b/config/conf/auth.py new file mode 100644 index 0000000..93efe66 --- /dev/null +++ b/config/conf/auth.py @@ -0,0 +1,106 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import importlib + +import buildbot.plugins + + +def _get_auth_config(devops_env_id: str): + if devops_env_id == "LOCAL": + import conf.local.auth + + importlib.reload(conf.local.auth) + return conf.local.auth + else: + import conf.production.auth + + importlib.reload(conf.production.auth) + return conf.production.auth + + +def fetch_authentication(devops_env_id: str): + auth_config = _get_auth_config(devops_env_id) + return auth_config.get_authentication(devops_env_id) + + +def fetch_authorization(devops_env_id: str): + auth_config = _get_auth_config(devops_env_id) + + admin_usernames = auth_config.admin_usernames + deploy_dev_usernames = auth_config.deploy_dev_usernames + trusted_dev_usernames = auth_config.trusted_dev_usernames + + dev_usernames = list(set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)) + deploy_usernames = list(set(deploy_dev_usernames + admin_usernames)) + + file_based_group_username_role_matchers = [ + buildbot.plugins.util.RolesFromUsername(roles=["admin"], usernames=admin_usernames), + buildbot.plugins.util.RolesFromUsername(roles=["deploy"], usernames=deploy_usernames), + buildbot.plugins.util.RolesFromUsername(roles=["dev"], usernames=dev_usernames), + ] + + my_authz = buildbot.plugins.util.Authz( + stringsMatcher=buildbot.plugins.util.fnmatchStrMatcher, + allowRules=[ + # Admins can do anything, + # + # defaultDeny=False: if user does not have the admin role, we continue + # parsing rules + # buildbot.plugins.util.AnyEndpointMatcher(role='admin', defaultDeny=False), + # buildbot.plugins.util.AnyEndpointMatcher(role='dev', defaultDeny=False), + # buildbot.plugins.util.AnyEndpointMatcher(role='coordinator', defaultDeny=False), + # buildbot.plugins.util.AnyEndpointMatcher(role='anonymous', defaultDeny=False), + 
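+            # The active rules below: stopping and rebuilding builds needs the
+            # 'dev' role, enabling schedulers needs 'admin', and force/rebuild
+            # access is then granted per builder name pattern.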
buildbot.plugins.util.StopBuildEndpointMatcher(role="dev", defaultDeny=True), + buildbot.plugins.util.RebuildBuildEndpointMatcher(role="dev", defaultDeny=True), + buildbot.plugins.util.EnableSchedulerEndpointMatcher(role="admin", defaultDeny=True), + # buildbot.plugins.util.AnyEndpointMatcher(role='any', defaultDeny=False), + # Force roles + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-code-experimental-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-code-patch-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-code-daily-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-store-*", role="deploy", defaultDeny=True + ), + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-deploy-*", role="deploy", defaultDeny=True + ), + buildbot.plugins.util.ForceBuildEndpointMatcher( + builder="*-doc-*", role="dev", defaultDeny=True + ), + # Rebuild roles + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-code-experimental-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-code-patch-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-code-daily-*", role="dev", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-store-*", role="deploy", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-deploy-*", role="deploy", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + builder="*-doc-*", role="dev", defaultDeny=True + ), + # This also affects starting jobs via force scheduler + buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True), + # A default deny for any endpoint if not admin + # If this is missing at the end, any UNMATCHED group will get 'allow'... + buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True), + ], + roleMatchers=file_based_group_username_role_matchers, + ) + + return my_authz diff --git a/config/conf/branches.py b/config/conf/branches.py new file mode 100644 index 0000000..fff848c --- /dev/null +++ b/config/conf/branches.py @@ -0,0 +1,106 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import copy + +from collections import OrderedDict + +# Blender repository branches used for daily builds and API doc generation. +code_tracked_branch_ids = { + "vdev": "main", + "vexp": "", + "v360": "blender-v3.6-release", + "v420": "blender-v4.2-release", + "v430": "blender-v4.3-release", +} + +# Processor architectures to build for each track. +code_official_platform_architectures = { + "vdev": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"], + "vexp": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"], + "v360": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"], + "v420": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"], + "v430": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"], +} + +# Windows ARM64 not used by default yet. 
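+# It is appended per track below, so only the main, experimental and 4.3
+# tracks pick it up.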
+code_all_platform_architectures = copy.deepcopy(code_official_platform_architectures) +code_all_platform_architectures["vdev"].append("windows-arm64") +code_all_platform_architectures["vexp"].append("windows-arm64") +code_all_platform_architectures["v430"].append("windows-arm64") + +track_major_minor_versions = { + "vdev": "4.4", + "vexp": "4.4", + "v360": "3.6", + "v330": "3.3", + "v420": "4.2", + "v430": "4.3", +} + +# Blender code and manual git branches. +track_code_branches = { + "vdev": "main", + "vexp": "main", + "v360": "blender-v3.6-release", + "v420": "blender-v4.2-release", + "v430": "blender-v4.3-release", +} + +# Tracks that correspond to an LTS version released on the Windows Store. +# Only add entries here AFTER the regular release is out, since it will +# otherwise generate the wrong package for the regular release. +windows_store_lts_tracks = ["v360", "v420"] + +# Tracks that correspond to active and upcoming LTS releases. Used for +# the Snap track name, and for Steam to determine if there is a daily LTS +# track to upload to. +all_lts_tracks = ["v360", "v420"] + +# Tracks for automated delivery of daily builds to stores. +code_store_track_ids = [ + "vdev", + "v360", + "v420", + "v430", +] + +# Tracks to deploy releases (regular and LTS) to download.blender.org. +code_deploy_track_ids = { + "v360": None, + "v420": None, + "v430": None, +} + +# Stable track for manual and API docs. +# Update on release. +doc_stable_major_minor_version = "4.3" + +# Versions and labels for the user manual version switching menu. +# Update when creating new release branch, and on release. +doc_manual_version_labels = OrderedDict( + [ + ("2.79", "2.79"), + ("2.80", "2.80"), + ("2.81", "2.81"), + ("2.82", "2.82"), + ("2.83", "2.83 (LTS)"), + ("2.90", "2.90"), + ("2.91", "2.91"), + ("2.92", "2.92"), + ("2.93", "2.93 (LTS)"), + ("3.0", "3.0"), + ("3.1", "3.1"), + ("3.2", "3.2"), + ("3.3", "3.3 (LTS)"), + ("3.4", "3.4"), + ("3.5", "3.5"), + ("3.6", "3.6 (LTS)"), + ("4.0", "4.0"), + ("4.1", "4.1"), + ("4.2", "4.2 (LTS)"), + ("4.3", "4.3"), + ("4.4", "4.4 (develop)"), + ] +) diff --git a/config/conf/local/__init__.py b/config/conf/local/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/conf/local/auth.py b/config/conf/local/auth.py new file mode 100644 index 0000000..b677be9 --- /dev/null +++ b/config/conf/local/auth.py @@ -0,0 +1,28 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import buildbot.plugins + +# Buildbot admin with access to everything. +admin_usernames = [ + "admin", +] + +# Release engineers with access to store and deploy builders. +deploy_dev_usernames = [ + "admin", +] + +# Trusted developers with access to trigger daily, doc and patch builds. 
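+# (Together with the lists above, these map to the 'dev' role in conf/auth.py.)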
+trusted_dev_usernames = [ + "admin", +] + + +def get_authentication(devops_env_id: str): + class LocalEnvAuth(buildbot.plugins.util.CustomAuth): + def check_credentials(self, user, password): + return user.decode() == "admin" and password.decode() == "admin" + + return LocalEnvAuth() diff --git a/config/conf/local/machines.py b/config/conf/local/machines.py new file mode 100644 index 0000000..9087e67 --- /dev/null +++ b/config/conf/local/machines.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +_worker_names = { + "code-lint": ["localhost"], + "linux-x86_64-code": ["localhost"], + "linux-x86_64-code-gpu": ["localhost"], + "linux-x86_64-doc-api": ["localhost"], + "linux-x86_64-doc-studio-tools": ["localhost"], + "linux-x86_64-general": ["localhost"], + "linux-x86_64-store-snap": ["localhost"], + "linux-x86_64-store-steam": ["localhost"], + "darwin-arm64-code": ["localhost"], + "darwin-arm64-code-gpu": ["localhost"], + "darwin-x86_64-code": ["localhost"], + "darwin-x86_64-code-gpu": ["localhost"], + "windows-amd64-code": ["localhost"], + "windows-amd64-code-gpu": [], + "windows-amd64-store-windows": ["localhost"], + "windows-arm64-code": ["localhost"], + "windows-arm64-code-gpu": [], +} + + +def get_worker_password(worker_name: str) -> str: + return "localhost" + + +def get_worker_names(devops_env_id: str): + return _worker_names diff --git a/config/conf/local/worker.py b/config/conf/local/worker.py new file mode 100644 index 0000000..e178165 --- /dev/null +++ b/config/conf/local/worker.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import os +import pathlib + +from typing import Optional, Tuple + +# Where tracks data is stored. +tracks_root_path = pathlib.Path.home() / "git" + +# Software cache +software_cache_path = tracks_root_path / "downloads" / "software" / "workers" + +# Docs delivery. +docs_user = os.getlogin() +docs_machine = "127.0.0.1" +docs_folder = tracks_root_path / "delivery" / "docs" +docs_port = 22 + +# Studio docs delivery. +studio_user = os.getlogin() +studio_machine = "127.0.0.1" +studio_folder = tracks_root_path / "delivery" / "studio" / "blender-studio-tools" +studio_port = 22 + +# Download delivery. 
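+# Local stand-ins for the download delivery host; 127.0.0.1 keeps all
+# deliveries under tracks_root_path on this machine.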
+download_user = os.getlogin() +download_machine = "127.0.0.1" +download_source_folder = tracks_root_path / "delivery" / "download" / "source" +download_release_folder = tracks_root_path / "delivery" / "download" / "release" +download_port = 22 + +# Buildbot download delivery +buildbot_download_folder = tracks_root_path / "delivery" / "buildbot" + +# Code signing +sign_code_windows_certificate = None # "Blender Self Code Sign SPC" +sign_code_windows_time_servers = ["http://ts.ssl.com"] +sign_code_windows_server_url = "http://fake-windows-sign-server" + +sign_code_darwin_certificate = None +sign_code_darwin_team_id = None +sign_code_darwin_apple_id = None +sign_code_darwin_keychain_profile = None + + +def darwin_keychain_password(service_env_id: str) -> str: + return "fake_keychain_password" + + +# Steam +steam_app_id = None +steam_platform_depot_ids = { + "windows": None, + "linux": None, + "darwin": None, +} + + +def steam_credentials(service_env_id: str) -> Tuple[str, str]: + return "fake_steam_username", "fake_steam_password" + + +# Snap +def snap_credentials(service_env_id: str) -> str: + return "fake_snap_credentials" + + +# Windows Store +windows_store_self_sign = False + + +def windows_store_certificate(service_env_id: str) -> str: + # return sign_code_windows_certificate + return "fake_windows_store_publisher" + + +# PyPI +def pypi_token(service_env_id: str) -> str: + return "fake_pypi_token" + + +# Gitea +def gitea_api_token(service_env_id: str) -> Optional[str]: + return None diff --git a/config/conf/machines.py b/config/conf/machines.py new file mode 100644 index 0000000..55b1aa5 --- /dev/null +++ b/config/conf/machines.py @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import importlib + + +def _get_config(devops_env_id: str): + if devops_env_id == "LOCAL": + import conf.local.machines + + importlib.reload(conf.local.machines) + return conf.local.machines + else: + import conf.production.machines + + importlib.reload(conf.production.machines) + return conf.production.machines + + +def fetch_platform_worker_names(devops_env_id: str): + machines_config = _get_config(devops_env_id) + return machines_config.get_worker_names(devops_env_id) + + +def get_worker_password(devops_env_id: str, worker_name: str) -> str: + machines_config = _get_config(devops_env_id) + return machines_config.get_worker_password(worker_name) + + +def fetch_local_worker_names(): + worker_names = [] + worker_numbers = range(1, 5, 1) + for worker_number in worker_numbers: + worker_id = str(worker_number).zfill(2) + worker_name = f"local-coordinator-{worker_id}" + worker_names += [worker_name] + + return worker_names diff --git a/config/conf/worker.py b/config/conf/worker.py new file mode 100644 index 0000000..963becf --- /dev/null +++ b/config/conf/worker.py @@ -0,0 +1,37 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import importlib + +from typing import Any + + +def get_config(devops_env_id: str) -> Any: + if devops_env_id == "LOCAL": + import conf.local.worker + + importlib.reload(conf.local.worker) + return conf.local.worker + else: + import conf.production.worker + + importlib.reload(conf.production.worker) + return conf.production.worker + + +# Maybe useful in the future. 
+# +# import pathlib +# import importlib.util +# +# def _load_module_config(path: pathlib.Path) -> Any: +# filepath = pathlib.Path(__file__).parent / path +# spec = importlib.util.spec_from_file_location("config_module", filepath) +# if not spec: +# raise BaseException("Failed to load config module spec") +# config_module = importlib.util.module_from_spec(spec) +# if not spec.loader: +# raise BaseException("Failed to load config module spec loader") +# spec.loader.exec_module(config_module) +# return config_module diff --git a/config/gitea/LICENSE b/config/gitea/LICENSE new file mode 100644 index 0000000..be288ed --- /dev/null +++ b/config/gitea/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 LAB132 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/config/gitea/README.md b/config/gitea/README.md new file mode 100644 index 0000000..7d7eb6a --- /dev/null +++ b/config/gitea/README.md @@ -0,0 +1,4 @@ +### Buildbot Gitea Integration + +Based on: +https://github.com/lab132/buildbot-gitea diff --git a/config/gitea/__init__.py b/config/gitea/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/config/gitea/blender.py b/config/gitea/blender.py new file mode 100644 index 0000000..8f53811 --- /dev/null +++ b/config/gitea/blender.py @@ -0,0 +1,62 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2018 LAB132 +# SPDX-FileCopyrightText: 2013-2024 Blender Authors +# + +# Based on the gitlab reporter from buildbot + +from twisted.python import log + +import buildbot.plugins + +import importlib +import requests + +import gitea.reporter + +importlib.reload(gitea.reporter) + +# Create status reporter service. +gitea_url = "https://projects.blender.org" +gitea_api_token = None +gitea_status_service = None + + +def setup_service(devops_env_id: str): + import conf.worker + + importlib.reload(conf.worker) + worker_config = conf.worker.get_config(devops_env_id) + gitea_api_token = worker_config.gitea_api_token(devops_env_id) + + if gitea_api_token: + log.msg("Found Gitea API token, enabling status push") + return gitea.reporter.GiteaStatusService11(gitea_url, gitea_api_token, verbose=False) + else: + log.msg("No Gitea API token found, status push disabled") + return None + + +# Get revision for coordinator. 
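+# The renderers below resolve a commit SHA through the Gitea API when the
+# build does not carry one yet. A renderer that returns a property dict like
+# this would typically feed a SetProperties step, e.g. (hypothetical usage):
+#
+#   steps.SetProperties(properties=get_patch_revision)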
+@buildbot.plugins.util.renderer
+def get_patch_revision(props):
+    if "revision" in props and props["revision"]:
+        return {}
+    if "pull_revision" in props and props["pull_revision"]:
+        return {"revision": props["pull_revision"]}
+    pull_id = props["patch_id"]
+    url = f"{gitea_url}/api/v1/repos/blender/blender/pulls/{pull_id}"
+    response = requests.get(url, headers={"accept": "application/json"})
+    sha = response.json().get("head", {"sha": ""}).get("sha")
+    return {"revision": sha}
+
+
+@buildbot.plugins.util.renderer
+def get_branch_revision(props):
+    if "revision" in props and props["revision"]:
+        return {}
+    branch = props["override_branch_id"]
+    url = f"{gitea_url}/api/v1/repos/blender/blender/git/commits/{branch}"
+    response = requests.get(url, headers={"accept": "application/json"})
+    sha = response.json().get("sha", "")
+    return {"revision": sha}
diff --git a/config/gitea/reporter.py b/config/gitea/reporter.py
new file mode 100644
index 0000000..1e1f610
--- /dev/null
+++ b/config/gitea/reporter.py
@@ -0,0 +1,279 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2018 LAB132
+# SPDX-FileCopyrightText: 2013-2024 Blender Authors
+#
+
+# Based on the gitlab reporter from buildbot
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+from twisted.internet import defer
+from twisted.python import log
+
+from buildbot.process.properties import Interpolate
+from buildbot.process.properties import Properties
+from buildbot.process.results import CANCELLED
+from buildbot.process.results import EXCEPTION
+from buildbot.process.results import FAILURE
+from buildbot.process.results import RETRY
+from buildbot.process.results import SKIPPED
+from buildbot.process.results import SUCCESS
+from buildbot.process.results import WARNINGS
+from buildbot.reporters import http
+from buildbot.util import httpclientservice
+from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
+from buildbot.reporters.message import MessageFormatterRenderable
+
+
+import re
+
+
+# This name has a number in it to trick buildbot into reloading it without a
+# restart. Needs to be incremented every time this file is changed. Is there
+# a better solution?
+class GiteaStatusService11(http.ReporterBase):
+    name = "GiteaStatusService11"
+    ssh_url_match = re.compile(
+        r"(ssh://)?[\w+\-\_]+@[\w\.\-\_]+:?(\d*/)?(?P<owner>[\w_\-\.]+)/(?P<repo_name>[\w_\-\.]+?)(\.git)?$"
+    )
+
+    def checkConfig(
+        self,
+        baseURL,
+        token,
+        context=None,
+        context_pr=None,
+        verbose=False,
+        debug=None,
+        verify=None,
+        generators=None,
+        warningAsSuccess=False,
+        **kwargs,
+    ):
+        if generators is None:
+            generators = self._create_default_generators()
+
+        super().checkConfig(generators=generators, **kwargs)
+        httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)
+
+    @defer.inlineCallbacks
+    def reconfigService(
+        self,
+        baseURL,
+        token,
+        context=None,
+        context_pr=None,
+        verbose=False,
+        debug=None,
+        verify=None,
+        generators=None,
+        warningAsSuccess=False,
+        **kwargs,
+    ):
+        token = yield self.renderSecrets(token)
+        self.debug = debug
+        self.verify = verify
+        self.verbose = verbose
+        if generators is None:
+            generators = self._create_default_generators()
+
+        yield super().reconfigService(generators=generators, **kwargs)
+
+        self.context = context or Interpolate("buildbot/%(prop:buildername)s")
+        self.context_pr = context_pr or Interpolate("buildbot/pull_request/%(prop:buildername)s")
+        if baseURL.endswith("/"):
+            baseURL = baseURL[:-1]
+        self.baseURL = baseURL
+        self._http = yield httpclientservice.HTTPClientService.getService(
+            self.master,
+            baseURL,
+            headers={"Authorization": "token {}".format(token)},
+            debug=self.debug,
+            verify=self.verify,
+        )
+        self.verbose = verbose
+        self.project_ids = {}
+        self.warningAsSuccess = warningAsSuccess
+
+    def _create_default_generators(self):
+        start_formatter = MessageFormatterRenderable("Build started.")
+        end_formatter = MessageFormatterRenderable("Build done.")
+
+        return [
+            BuildStartEndStatusGenerator(
+                start_formatter=start_formatter, end_formatter=end_formatter
+            )
+        ]
+
+    def createStatus(
+        self, project_owner, repo_name, sha, state, target_url=None, description=None, context=None
+    ):
+        """
+        :param project_owner: username of the owning user or organization
+        :param repo_name: name of the repository
+        :param sha: Full sha to create the status for.
+        :param state: one of the following 'pending', 'success', 'failed'
+            or 'cancelled'.
+        :param target_url: Target url to associate with this status.
+        :param description: Short description of the status.
+        :param context: Context of the result
+        :return: A deferred with the result from Gitea.
+ + """ + payload = {"state": state} + + if description is not None: + payload["description"] = description + + if target_url is not None: + payload["target_url"] = target_url + + if context is not None: + payload["context"] = context + + url = "/api/v1/repos/{owner}/{repository}/statuses/{sha}".format( + owner=project_owner, repository=repo_name, sha=sha + ) + log.msg(f"Sending status to {url}: {payload}") + + return self._http.post(url, json=payload) + + @defer.inlineCallbacks + def sendMessage(self, reports): + yield self._send_impl(reports) + + @defer.inlineCallbacks + def _send_status( + self, build, repository_owner, repository_name, sha, state, context, description + ): + try: + target_url = build["url"] + res = yield self.createStatus( + project_owner=repository_owner, + repo_name=repository_name, + sha=sha, + state=state, + target_url=target_url, + context=context, + description=description, + ) + if res.code not in (200, 201, 204): + message = yield res.json() + message = message.get("message", "unspecified error") + log.msg( + 'Could not send status "{state}" for ' + "{repo} at {sha}: {code} : {message}".format( + state=state, repo=repository_name, sha=sha, code=res.code, message=message + ) + ) + elif self.verbose: + log.msg( + 'Status "{state}" sent for ' + "{repo} at {sha}.".format(state=state, repo=repository_name, sha=sha) + ) + except Exception as e: + log.err( + e, + 'Failed to send status "{state}" for ' + "{repo} at {sha}".format(state=state, repo=repository_name, sha=sha), + ) + + @defer.inlineCallbacks + def _send_impl(self, reports): + for report in reports: + try: + builds = report["builds"] + except KeyError: + continue + + for build in builds: + builder_name = build["builder"]["name"] + + props = Properties.fromDict(build["properties"]) + props.master = self.master + + description = report.get("body", None) + + if build["complete"]: + state = { + SUCCESS: "success", + WARNINGS: "success" if self.warningAsSuccess else "warning", + FAILURE: "failure", + SKIPPED: "success", + EXCEPTION: "error", + RETRY: "pending", + CANCELLED: "error", + }.get(build["results"], "failure") + else: + state = "pending" + + if "pr_id" in props: + context = yield props.render(self.context_pr) + else: + context = yield props.render(self.context) + + sourcestamps = build["buildset"]["sourcestamps"] + + # BLENDER: some hardcoded logic for now. + if ( + "-code-daily-" in builder_name + or "-code-patch-" in builder_name + or "-code-experimental-" in builder_name + ): + repository_owner = "blender" + repository_name = "blender" + elif "-doc-manual-" in builder_name: + repository_owner = "blender" + repository_name = "blender-manual" + elif "-doc-developer" in builder_name: + repository_owner = "blender" + repository_name = "blender-developer-docs" + else: + continue + + # Source change from Git poller. + for sourcestamp in sourcestamps: + sha = sourcestamp["revision"] + if sha not in {None, "", "HEAD"}: + self._send_status( + build, + repository_owner, + repository_name, + sha, + state, + context, + description, + ) + continue + + # Revision specified by get-revision step. + if "revision" in props: + sha = props["revision"] + if sha not in {None, "", "HEAD"}: + self._send_status( + build, + repository_owner, + repository_name, + sha, + state, + context, + description, + ) + + # Revision from blender-bot, so we can send a status before + # the get-revision step runs. 
+ if "pull_revision" in props: + sha = props["pull_revision"] + if sha not in {None, "", "HEAD"}: + self._send_status( + build, + repository_owner, + repository_name, + sha, + state, + context, + description, + ) + + continue diff --git a/config/pipeline/__init__.py b/config/pipeline/__init__.py new file mode 100644 index 0000000..5ee14bc --- /dev/null +++ b/config/pipeline/__init__.py @@ -0,0 +1,101 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import importlib + +from buildbot.plugins import changes as plugins_changes + +import conf.branches + +import pipeline.common +import pipeline.code +import pipeline.code_benchmark +import pipeline.code_deploy +import pipeline.code_bpy_deploy +import pipeline.code_store +import pipeline.doc_api +import pipeline.doc_manual +import pipeline.doc_developer +import pipeline.doc_studio + +importlib.reload(pipeline.common) +importlib.reload(conf.branches) + + +def populate(devops_env_id): + pipelines_modules = [ + pipeline.code, + pipeline.code_benchmark, + pipeline.code_deploy, + pipeline.code_bpy_deploy, + pipeline.code_store, + pipeline.doc_api, + pipeline.doc_manual, + pipeline.doc_developer, + pipeline.doc_studio, + ] + + builders = [] + schedulers = [] + + for pipelines_module in pipelines_modules: + importlib.reload(pipelines_module) + b, s = pipelines_module.populate(devops_env_id) + builders += b + schedulers += s + + return builders, schedulers + + +def change_sources(): + branch_ids = list(conf.branches.code_tracked_branch_ids.values()) + + pollers = [] + poll_interval_in_seconds = 2 * 60 + + pollers += [ + plugins_changes.GitPoller( + repourl="https://projects.blender.org/blender/blender.git", + pollAtLaunch=True, + pollInterval=poll_interval_in_seconds, + workdir="blender-gitpoller-workdir", + project="blender.git", + branches=branch_ids, + ) + ] + + pollers += [ + plugins_changes.GitPoller( + repourl="https://projects.blender.org/blender/blender-manual.git", + pollAtLaunch=True, + pollInterval=poll_interval_in_seconds, + workdir="blender-manual-gitpoller-workdir", + project="blender-manual.git", + branches=branch_ids, + ) + ] + + pollers += [ + plugins_changes.GitPoller( + repourl="https://projects.blender.org/blender/blender-developer-docs.git", + pollAtLaunch=True, + pollInterval=poll_interval_in_seconds, + workdir="blender-developer-docs-gitpoller-workdir", + project="blender-developer-docs.git", + branches=["main"], + ) + ] + + pollers += [ + plugins_changes.GitPoller( + repourl="https://projects.blender.org/studio/blender-studio-tools.git", + pollAtLaunch=True, + pollInterval=poll_interval_in_seconds, + workdir="blender-studio-tools-gitpoller-workdir", + project="blender-studio-tools.git", + branches=["main"], + ) + ] + + return pollers diff --git a/config/pipeline/code.py b/config/pipeline/code.py new file mode 100644 index 0000000..69d0ef2 --- /dev/null +++ b/config/pipeline/code.py @@ -0,0 +1,748 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +from functools import partial +import pathlib +import random + +import buildbot.plugins + +from buildbot.plugins import steps as plugins_steps +from buildbot.plugins import schedulers as plugins_schedulers + +import conf.branches +import conf.worker + +import pipeline.common +import gitea.reporter + +# Timeouts. 
+default_step_timeout_in_seconds = 10 * 60
+# TODO: The compile step needs more time because of linking on Windows.
+compile_code_step_timeout_in_seconds = 10 * 60
+compile_gpu_step_timeout_in_seconds = 1.5 * 60 * 60
+
+tree_stable_timer_in_seconds = 15 * 60
+
+package_step_timeout_in_seconds = 20 * 60
+
+# Build steps.
+code_pipeline_general_step_names = [
+    "configure-machine",
+    "update-code",
+    "compile-code",
+    "compile-gpu",
+    "compile-install",
+    "test-code",
+    "sign-code-binaries",
+    "package-code-binaries",
+    "deliver-code-binaries",
+    "deliver-test-results",
+    "clean",
+]
+
+code_pipeline_daily_step_names = code_pipeline_general_step_names
+
+code_pipeline_patch_step_names = [
+    "configure-machine",
+    "update-code",
+    "compile-code",
+    "compile-gpu",
+    "compile-install",
+    "test-code",
+    "sign-code-binaries",
+    "package-code-binaries",
+    "deliver-code-binaries",
+    "deliver-test-results",
+    "clean",
+]
+
+code_pipeline_experimental_step_names = code_pipeline_general_step_names
+
+pipeline_types_step_names = {
+    "daily": code_pipeline_daily_step_names,
+    "patch": code_pipeline_patch_step_names,
+    "experimental": code_pipeline_experimental_step_names,
+}
+
+code_pipeline_lint_step_names = [
+    "configure-machine",
+    "update-code",
+    "lint-code",
+]
+
+# Steps for testing.
+code_pipeline_test_step_names = [
+    "test-code",
+]
+
+# Steps for package delivery.
+code_delivery_step_names = [
+    "sign-code-binaries",
+    "package-code-binaries",
+    "deliver-code-binaries",
+]
+
+# Steps skipped for Python module.
+code_python_module_skip_test_names = ["sign-code-binaries"]
+
+
+# Tracks.
+code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
+code_track_ids = list(code_tracked_branch_ids.keys())
+code_all_platform_architectures = conf.branches.code_all_platform_architectures
+code_official_platform_architectures = conf.branches.code_official_platform_architectures
+
+code_track_pipeline_types = {}
+track_properties = {}
+for track, branch in code_tracked_branch_ids.items():
+    if track == "vdev":
+        code_track_pipeline_types[track] = ["daily"]
+    elif track == "vexp":
+        code_track_pipeline_types[track] = ["experimental", "patch"]
+    else:
+        code_track_pipeline_types[track] = ["daily"]
+
+    # Track properties.
+    track_properties[track] = [
+        buildbot.plugins.util.ChoiceStringParameter(
+            name="platform_architectures",
+            label="Platforms:",
+            required=True,
+            choices=code_all_platform_architectures[track],
+            multiple=True,
+            strict=True,
+            default=code_official_platform_architectures[track],
+        ),
+    ]
+
+# Scheduler properties.
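+# These parameters become the input fields of the force schedulers; the
+# chosen values are read back as build properties in
+# create_code_worker_command_args() and needs_do_code_pipeline_step().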
+scheduler_properties_common = [ + buildbot.plugins.util.BooleanParameter( + name="python_module", + label="Python module -> build bpy module instead of Blender", + required=True, + strict=True, + default=False, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_full_clean", + label="Full clean -> removes build workspace on machine", + required=True, + strict=True, + default=False, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_package_delivery", + label="Package delivery -> push files to configured services", + required=True, + strict=True, + default=False, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_gpu_binaries", + label="GPU binaries -> build Cycles GPU kernels", + required=True, + strict=True, + default=False, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_gpu_tests", + label="GPU tests -> run EEVEE, Viewport and Cycles GPU tests", + required=True, + strict=True, + default=False, + ), +] + +# code-daily +scheduler_properties_daily = scheduler_properties_common + +# code-experimental properties. +scheduler_properties_experimental = [ + buildbot.plugins.util.StringParameter( + name="override_branch_id", + label="Branch:", + required=True, + size=80, + regex=r"^[a-zA-Z0-9][A-Za-z0-9\._-]*$", + default="", + ), + buildbot.plugins.util.ChoiceStringParameter( + name="build_configuration", + label="Configuration:", + required=True, + choices=["release", "sanitizer", "debug"], + multiple=False, + strict=True, + default="release", + ), + buildbot.plugins.util.BooleanParameter( + name="needs_skip_tests", + label="Skip tests -> bypass running all tests", + required=True, + strict=True, + default=False, + ), +] +scheduler_properties_experimental += scheduler_properties_common + + +# code-patch properties. 
+scheduler_properties_patch = [
+    buildbot.plugins.util.StringParameter(
+        name="patch_id", label="Patch Id:", required=True, size=80, default=""
+    ),
+    buildbot.plugins.util.ChoiceStringParameter(
+        name="build_configuration",
+        label="Configuration:",
+        required=True,
+        choices=["release", "sanitizer", "debug"],
+        multiple=False,
+        strict=True,
+        default="release",
+    ),
+    buildbot.plugins.util.BooleanParameter(
+        name="needs_skip_tests",
+        label="Skip tests -> bypass running all tests",
+        required=True,
+        strict=True,
+        default=False,
+    ),
+    buildbot.plugins.util.StringParameter(
+        name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default=""
+    ),
+]
+
+scheduler_properties_patch += scheduler_properties_common
+
+scheduler_properties = {
+    "code-daily": scheduler_properties_daily,
+    "code-experimental": scheduler_properties_experimental,
+    "code-patch": scheduler_properties_patch,
+}
+
+
+@buildbot.plugins.util.renderer
+def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_type, step_name):
+    commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
+    patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
+    override_branch_id = pipeline.common.fetch_property(props, key="override_branch_id", default="")
+    python_module = pipeline.common.fetch_property(props, key="python_module", default=False)
+    needs_gpu_tests = pipeline.common.fetch_property(props, key="needs_gpu_tests", default=False)
+    needs_gpu_binaries = pipeline.common.fetch_property(
+        props, key="needs_gpu_binaries", default=False
+    )
+    build_configuration = pipeline.common.fetch_property(
+        props, key="build_configuration", default="release"
+    )
+    needs_full_clean = pipeline.common.fetch_property(
+        props, key="needs_full_clean", default="false"
+    )
+    needs_full_clean = needs_full_clean in ["true", True]
+    needs_package_delivery = pipeline.common.fetch_property(
+        props, key="needs_package_delivery", default="false"
+    )
+    needs_package_delivery = needs_package_delivery in ["true", True]
+
+    # Auto-enable asserts when not using package delivery. Only supported in 4.1+.
+    if track_id not in ("v360",):
+        if build_configuration == "release" and not needs_package_delivery:
+            build_configuration = "asserts"
+
+    platform_id, architecture = pipeline.common.fetch_platform_architecture(props)
+
+    args = []
+
+    if architecture:
+        args += ["--architecture", architecture]
+
+    if pipeline_type == "patch":
+        # PowerShell doesn't like '#' in string arguments, so strip it.
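+        # For example, a hypothetical patch id entered as "#123456" is passed
+        # on to the worker script as plain "123456".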
+        args += ["--patch-id", patch_id.lstrip("#")]
+    elif pipeline_type == "experimental":
+        args += ["--branch-id", override_branch_id]
+
+    args += ["--commit-id", commit_id]
+    args += ["--build-configuration", build_configuration]
+
+    if python_module:
+        args += ["--python-module"]
+    if needs_full_clean:
+        args += ["--needs-full-clean"]
+    if step_name in ["compile-gpu", "compile-install", "test-code"]:
+        if needs_package_delivery or needs_gpu_binaries:
+            args += ["--needs-gpu-binaries"]
+        if needs_gpu_tests:
+            args += ["--needs-gpu-tests"]
+
+    args += [step_name]
+
+    return pipeline.common.create_worker_command("code.py", devops_env_id, track_id, args)
+
+
+def needs_do_code_pipeline_step(step):
+    # Use this to test master steps only; otherwise we'd be waiting for 30 minutes.
+    needs_master_steps_only = False
+
+    if needs_master_steps_only:
+        is_master_step = step.name in pipeline.common.code_pipeline_master_step_names
+        return is_master_step
+
+    is_package_delivery_step = (step.name in code_delivery_step_names) or (
+        step.name in pipeline.common.code_pipeline_master_step_names
+    )
+    needs_package_delivery = step.getProperty("needs_package_delivery")
+    needs_gpu_binaries = step.getProperty("needs_gpu_binaries")
+    needs_skip_tests = step.getProperty("needs_skip_tests")
+
+    python_module = step.getProperty("python_module")
+
+    needs_do_it = True
+
+    if step.name in code_pipeline_test_step_names:
+        needs_do_it = not needs_skip_tests
+    elif step.name == "compile-gpu":
+        needs_do_it = needs_package_delivery or needs_gpu_binaries
+    elif is_package_delivery_step:
+        needs_do_it = needs_package_delivery
+
+    if python_module and (step.name in code_python_module_skip_test_names):
+        needs_do_it = False
+
+    return needs_do_it
+
+
+# Custom file upload that shows links to download files.
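+# Buildbot calls uploadDone() once per uploaded file, so each artifact gets
+# its own link: zip archives link directly, anything else is assumed to be a
+# test report directory exposed through its report.html.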
+class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): + def uploadDone(self, result, source, masterdest): + if not self.url: + return + + name = pathlib.Path(source).name + if name.endswith(".zip"): + self.addURL(name, self.url + "/" + name) + else: + self.addURL(name, self.url + "/" + name + "/report.html") + + def allUploadsDone(self, result, sources, masterdest): + return + + +def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type): + file_size_in_mb = 500 * 1024 * 1024 + worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + master_dest_path = pathlib.Path( + f"{worker_config.buildbot_download_folder}/{pipeline_type}" + ).expanduser() + + return plugins_steps.MultipleFileUpload( + name="deliver-code-binaries", + maxsize=file_size_in_mb, + workdir=f"{worker_source_path}", + glob=True, + workersrcs=["*.*"], + masterdest=f"{master_dest_path}", + mode=0o644, + url=None, + description="running", + descriptionDone="completed", + doStepIf=needs_do_code_pipeline_step, + ) + + +def create_deliver_test_results_step(worker_config, track_id, pipeline_type): + file_size_in_mb = 500 * 1024 * 1024 + worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + master_dest_path = pathlib.Path( + f"{worker_config.buildbot_download_folder}/{pipeline_type}" + ).expanduser() + + tests_worker_source_path = worker_source_path / "tests" + tests_master_dest_path = master_dest_path / "tests" + tests_worker_srcs = ["tests-*.zip"] + + branch_id = code_tracked_branch_ids[track_id] + if branch_id: + branch_id = branch_id.replace("blender-", "").replace("-release", "") + tests_worker_srcs.append(branch_id + "-*") + + return LinkMultipleFileUpload( + name="deliver-test-results", + maxsize=file_size_in_mb, + workdir=f"{tests_worker_source_path}", + glob=True, + workersrcs=tests_worker_srcs, + masterdest=f"{tests_master_dest_path}", + mode=0o644, + url=f"../download/{pipeline_type}/tests", + description="running", + descriptionDone="completed", + alwaysRun=True, + ) + + +def next_worker_code(worker_names_gpu, builder, workers, request): + # Use a GPU worker if needed and supported for this platform. + # NVIDIA worker is currently reserved for GPU builds only. 
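+    # E.g. a hypothetical worker named "linux-x86_64-nvidia-1" is only picked
+    # when needs_gpu_tests is set, and is skipped for all other requests.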
+ compatible_workers = [] + if request.properties.getProperty("needs_gpu_tests", False) and worker_names_gpu: + for worker in workers: + if worker.worker.workername in worker_names_gpu: + compatible_workers.append(worker) + else: + for worker in workers: + if "nvidia" not in worker.worker.workername: + compatible_workers.append(worker) + + if not compatible_workers: + return None + + return random.choice(compatible_workers) + + +class PlatformTrigger(plugins_steps.Trigger): + def getSchedulersAndProperties(self): + schedulers = [] + + platform_architectures = self.set_properties["platform_architectures"] + + for scheduler in self.schedulerNames: + found = False + if "lint" in scheduler: + found = True + for platform_architecture in platform_architectures: + if platform_architecture in scheduler: + found = True + + if found: + schedulers.append( + { + "sched_name": scheduler, + "props_to_set": self.set_properties, + "unimportant": False, + } + ) + + return schedulers + + +def populate(devops_env_id): + builders = [] + schedulers = [] + + platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + local_worker_names = conf.machines.fetch_local_worker_names() + + worker_config = conf.worker.get_config(devops_env_id) + + needs_incremental_schedulers = devops_env_id in ["PROD"] + needs_nightly_schedulers = devops_env_id in ["PROD"] + + print("*** Creating [code] pipeline") + for track_id in code_track_ids: + pipeline_types = code_track_pipeline_types[track_id] + for pipeline_type in pipeline_types: + # Create steps. + step_names = pipeline_types_step_names[pipeline_type] + pipeline_build_factory = buildbot.plugins.util.BuildFactory() + + print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps") + for step_name in step_names: + if step_name == "deliver-code-binaries": + step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type) + elif step_name == "deliver-test-results": + step = create_deliver_test_results_step(worker_config, track_id, pipeline_type) + else: + needs_halt_on_failure = True + if step_name in code_pipeline_test_step_names: + needs_halt_on_failure = track_id != "vexp" + + step_timeout_in_seconds = default_step_timeout_in_seconds + if step_name == "compile-code": + step_timeout_in_seconds = compile_code_step_timeout_in_seconds + elif step_name == "compile-gpu": + step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds + + step_command = create_code_worker_command_args.withArgs( + devops_env_id, track_id, pipeline_type, step_name + ) + + step = buildbot.plugins.steps.ShellCommand( + name=step_name, + logEnviron=True, + haltOnFailure=needs_halt_on_failure, + timeout=step_timeout_in_seconds, + description="running", + descriptionDone="completed", + doStepIf=needs_do_code_pipeline_step, + command=step_command, + ) + + pipeline_build_factory.addStep(step) + + for master_step_name in pipeline.common.code_pipeline_master_step_names: + master_step_command = pipeline.common.create_master_command_args.withArgs( + devops_env_id, track_id, pipeline_type, master_step_name, single_platform=True + ) + + # Master to archive and purge builds + pipeline_build_factory.addStep( + plugins_steps.MasterShellCommand( + name=master_step_name, + logEnviron=False, + command=master_step_command, + description="running", + descriptionDone="completed", + doStepIf=needs_do_code_pipeline_step, + ) + ) + + # Create lint pipeline + pipeline_lint_factory = buildbot.plugins.util.BuildFactory() + for step_name in code_pipeline_lint_step_names: + 
step_command = create_code_worker_command_args.withArgs( + devops_env_id, track_id, pipeline_type, step_name + ) + + pipeline_lint_factory.addStep( + buildbot.plugins.steps.ShellCommand( + name=step_name, + logEnviron=True, + haltOnFailure=True, + timeout=default_step_timeout_in_seconds, + description="running", + descriptionDone="completed", + command=step_command, + ) + ) + + triggerable_scheduler_names = [] + trigger_factory = buildbot.plugins.util.BuildFactory() + + # Create builders. + for platform_architecture in code_all_platform_architectures[track_id]: + print(f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders") + + worker_group_id = f"{platform_architecture}-code" + worker_group_id_gpu = f"{platform_architecture}-code-gpu" + + pipeline_worker_names = platform_worker_names[worker_group_id] + pipeline_worker_names_gpu = platform_worker_names[worker_group_id_gpu] + if pipeline_worker_names: + # Only create the builders if the worker exists + pipeline_builder_name = ( + f"{track_id}-code-{pipeline_type}-{platform_architecture}" + ) + pipeline_builder_tags = pipeline_builder_name.split("-") + + # Assigning different workers for different tracks, specifically Linux builders. + suitable_pipeline_worker_names = pipeline_worker_names + if platform_architecture == "linux-x86_64" and devops_env_id != "LOCAL": + selector = "rocky" + suitable_pipeline_worker_names = [ + worker for worker in pipeline_worker_names if selector in worker + ] + + builders += [ + buildbot.plugins.util.BuilderConfig( + name=pipeline_builder_name, + workernames=suitable_pipeline_worker_names, + nextWorker=partial(next_worker_code, pipeline_worker_names_gpu), + tags=pipeline_builder_tags, + factory=pipeline_build_factory, + ) + ] + + pipeline_scheduler_name = ( + f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable" + ) + triggerable_scheduler_names += [pipeline_scheduler_name] + + schedulers += [ + plugins_schedulers.Triggerable( + name=pipeline_scheduler_name, builderNames=[pipeline_builder_name] + ) + ] + + # Create lint builder + if track_id not in conf.branches.all_lts_tracks: + print(f"Creating [{track_id}] [{pipeline_type}] [lint] builders") + + pipeline_worker_names = platform_worker_names["code-lint"] + if pipeline_worker_names: + # Only create the builders if the worker exists + pipeline_builder_name = f"{track_id}-code-{pipeline_type}-lint" + pipeline_builder_tags = pipeline_builder_name.split("-") + + builders += [ + buildbot.plugins.util.BuilderConfig( + name=pipeline_builder_name, + workernames=pipeline_worker_names, + tags=pipeline_builder_tags, + factory=pipeline_lint_factory, + ) + ] + + pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-lint-triggerable" + triggerable_scheduler_names += [pipeline_scheduler_name] + + schedulers += [ + plugins_schedulers.Triggerable( + name=pipeline_scheduler_name, builderNames=[pipeline_builder_name] + ) + ] + + # Create coordinator. 
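+            # The coordinator runs on a local master-side worker and does no
+            # compiling itself; it only triggers the per-platform builders
+            # above, so one force build fans out to every selected platform.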
+ if triggerable_scheduler_names: + trigger_properties = { + "python_module": buildbot.plugins.util.Property("python_module"), + "needs_full_clean": buildbot.plugins.util.Property("needs_full_clean"), + "needs_package_delivery": buildbot.plugins.util.Property( + "needs_package_delivery" + ), + "needs_gpu_binaries": buildbot.plugins.util.Property("needs_gpu_binaries"), + "needs_gpu_tests": buildbot.plugins.util.Property("needs_gpu_tests"), + "needs_skip_tests": buildbot.plugins.util.Property("needs_skip_tests"), + "platform_architectures": buildbot.plugins.util.Property( + "platform_architectures" + ), + } + if pipeline_type == "patch": + trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id") + trigger_properties["revision"] = buildbot.plugins.util.Property("revision") + trigger_properties["build_configuration"] = buildbot.plugins.util.Property( + "build_configuration" + ) + trigger_factory.addStep( + plugins_steps.SetProperties( + name="get-revision", properties=gitea.blender.get_patch_revision + ) + ) + elif pipeline_type == "experimental": + trigger_properties["override_branch_id"] = buildbot.plugins.util.Property( + "override_branch_id" + ) + trigger_properties["revision"] = buildbot.plugins.util.Property("revision") + trigger_properties["build_configuration"] = buildbot.plugins.util.Property( + "build_configuration" + ) + trigger_factory.addStep( + plugins_steps.SetProperties( + name="get-revision", properties=gitea.blender.get_branch_revision + ) + ) + + trigger_factory.addStep( + PlatformTrigger( + schedulerNames=triggerable_scheduler_names, + waitForFinish=True, + updateSourceStamp=False, + set_properties=trigger_properties, + description="running", + descriptionDone="completed", + ) + ) + + coordinator_builder_name = f"{track_id}-code-{pipeline_type}-coordinator" + builder_tags = coordinator_builder_name.split("-") + + builders += [ + buildbot.plugins.util.BuilderConfig( + name=coordinator_builder_name, + workernames=local_worker_names, + tags=builder_tags, + factory=trigger_factory, + ) + ] + + coordinator_scheduler_name = f"{track_id}-code-{pipeline_type}-coordinator-force" + schedulers += [ + plugins_schedulers.ForceScheduler( + name=coordinator_scheduler_name, + buttonName=f"Trigger {pipeline_type} build", + builderNames=[coordinator_builder_name], + codebases=[ + buildbot.plugins.util.CodebaseParameter( + codebase="blender.git", + project="blender.git", + branch=code_tracked_branch_ids[track_id], + hide=True, + ) + ], + properties=track_properties[track_id] + + scheduler_properties[f"code-{pipeline_type}"], + ) + ] + + # Daily scheduler. 
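+            # Two automatic schedulers: an incremental build once the tree
+            # has been stable for tree_stable_timer_in_seconds after a push,
+            # and a nightly full build at 01:30 with package delivery and
+            # GPU binaries enabled for all platforms of the track.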
+ if pipeline_type == "daily": + print(f"Adding [{pipeline_type}] schedulers") + if needs_incremental_schedulers and (track_id in code_track_ids): + incremental_scheduler_name = ( + f"{track_id}-code-{pipeline_type}-coordinator-incremental" + ) + incremental_scheduler_properties = { + "revision": "HEAD", + "python_module": False, + "needs_skip_tests": False, + "needs_package_delivery": False, + "needs_gpu_binaries": False, + "build_configuration": "release", + "platform_architectures": code_official_platform_architectures[ + track_id + ], + } + + change_filter = buildbot.plugins.util.ChangeFilter( + project=["blender.git"], branch=code_tracked_branch_ids[track_id] + ) + schedulers += [ + plugins_schedulers.SingleBranchScheduler( + name=incremental_scheduler_name, + builderNames=[coordinator_builder_name], + change_filter=change_filter, + properties=incremental_scheduler_properties, + treeStableTimer=tree_stable_timer_in_seconds, + ) + ] + + if needs_nightly_schedulers and (track_id in code_track_ids): + nightly_scheduler_name = ( + f"{track_id}-code-{pipeline_type}-coordinator-nightly" + ) + nightly_properties = { + "revision": "HEAD", + "python_module": False, + "needs_skip_tests": False, + "needs_package_delivery": True, + "needs_gpu_binaries": True, + "build_configuration": "release", + "platform_architectures": code_all_platform_architectures[track_id], + } + nightly_codebases = { + "blender.git": { + "repository": "", + "branch": code_tracked_branch_ids[track_id], + "revision": None, + } + } + schedulers += [ + plugins_schedulers.Nightly( + name=nightly_scheduler_name, + builderNames=[coordinator_builder_name], + codebases=nightly_codebases, + properties=nightly_properties, + onlyIfChanged=False, + hour=1, + minute=30, + ) + ] + + return builders, schedulers diff --git a/config/pipeline/code_benchmark.py b/config/pipeline/code_benchmark.py new file mode 100644 index 0000000..ab695d0 --- /dev/null +++ b/config/pipeline/code_benchmark.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +from functools import partial + +import buildbot.plugins +from buildbot.plugins import steps as plugins_steps + +import conf.branches +import conf.worker +import pipeline.common + + +# Custom file upload that shows links to download files. 
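+# Unlike the code pipeline variant, every uploaded file here links to a
+# report.html, and the delivery step always runs so benchmark results are
+# published even when an earlier step failed.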
+class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): + def uploadDone(self, result, source, masterdest): + if not self.url: + return + + name = pathlib.Path(source).name + self.addURL(name, self.url + "/" + name + "/report.html") + + def allUploadsDone(self, result, sources, masterdest): + return + + +def create_deliver_step(devops_env_id): + worker_config = conf.worker.get_config(devops_env_id) + + file_size_in_mb = 500 * 1024 * 1024 + worker_source_path = pathlib.Path("../../../../git/blender-vdev/build_package") + master_dest_path = worker_config.buildbot_download_folder / "daily" / "benchmarks" + + return LinkMultipleFileUpload( + name="deliver", + maxsize=file_size_in_mb, + workdir=f"{worker_source_path}", + glob=True, + workersrcs=["main-*"], + masterdest=f"{master_dest_path}", + mode=0o644, + url="../download/daily/benchmarks", + description="running", + descriptionDone="completed", + alwaysRun=True, + ) + + +def populate(devops_env_id): + properties = [ + buildbot.plugins.util.StringParameter( + name="commit_id", + label="Commit:", + required=True, + size=80, + default="HEAD", + ), + buildbot.plugins.util.BooleanParameter( + name="needs_gpu_binaries", + label="GPU binaries -> build Cycles GPU kernels", + required=True, + strict=True, + default=True, + hide=True, + ), + ] + + return pipeline.common.create_pipeline( + devops_env_id, + "code-benchmark", + "code_benchmark.py", + [ + "configure-machine", + "update-code", + "compile-code", + "compile-gpu", + "compile-install", + "benchmark", + partial(create_deliver_step, devops_env_id), + "clean", + ], + {"vdev": "main"}, + properties, + "blender.git", + ["linux-x86_64-code-gpu", "darwin-arm64-code-gpu"], + # Compile GPU step needs a long timeout. + default_step_timeout_in_seconds=90 * 60, + variations=["linux", "darwin"], + nightly_properties={"commit_id": "HEAD", "needs_gpu_binaries": True}, + hour=7, + minute=30, + ) diff --git a/config/pipeline/code_bpy_deploy.py b/config/pipeline/code_bpy_deploy.py new file mode 100644 index 0000000..b5e5f48 --- /dev/null +++ b/config/pipeline/code_bpy_deploy.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +# Builders for deploying Python module releases to PyPI. + + +import conf.branches +import pipeline.common + + +def populate(devops_env_id): + properties = [] + + return pipeline.common.create_pipeline( + devops_env_id, + "code-bpy-deploy", + "code_bpy_deploy.py", + [ + "configure-machine", + "update-code", + "pull", + "deliver-pypi", + "clean", + ], + conf.branches.code_deploy_track_ids, + properties, + "blender.git", + ["linux-x86_64-general"], + ) diff --git a/config/pipeline/code_deploy.py b/config/pipeline/code_deploy.py new file mode 100644 index 0000000..cb89aba --- /dev/null +++ b/config/pipeline/code_deploy.py @@ -0,0 +1,43 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +# Builders for deploying Blender releases. 
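+#
+# Rather than rebuilding Blender, this pipeline works on previously built
+# artifacts: it pulls, repackages and deploys them (see the pull-artifacts,
+# repackage-artifacts and deploy-artifacts steps below).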
+ +import buildbot.plugins + +import conf.branches +import pipeline.common + + +def populate(devops_env_id): + properties = [ + buildbot.plugins.util.BooleanParameter( + name="needs_full_clean", + label="Full clean -> removes build workspace on machine", + required=True, + strict=True, + default=False, + ), + ] + + return pipeline.common.create_pipeline( + devops_env_id, + "code-artifacts-deploy", + "code_deploy.py", + [ + "configure-machine", + "update-code", + "package-source", + "pull-artifacts", + "repackage-artifacts", + "deploy-artifacts", + "monitor-artifacts", + "clean", + ], + conf.branches.code_deploy_track_ids, + properties, + "blender.git", + ["linux-x86_64-general"], + default_step_timeout_in_seconds=30 * 60, + ) diff --git a/config/pipeline/code_store.py b/config/pipeline/code_store.py new file mode 100644 index 0000000..35aaf09 --- /dev/null +++ b/config/pipeline/code_store.py @@ -0,0 +1,235 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +# Builders for releasing Blender to stores. + +import pathlib + +import buildbot.plugins + +from buildbot.plugins import steps as plugins_steps +from buildbot.plugins import schedulers as plugins_schedulers + +import conf.branches +import conf.worker +import pipeline.common + +# Timeouts. +default_step_timeout_in_seconds = 60 * 60 + +# Tracks. +track_ids = conf.branches.code_store_track_ids +tracked_branch_ids = {} +for track_id in track_ids: + tracked_branch_ids[track_id] = conf.branches.code_tracked_branch_ids[track_id] + +# Properties. +scheduler_properties = [ + buildbot.plugins.util.ChoiceStringParameter( + name="store_id", + label="Store:", + required=True, + choices=["snap", "steam", "windows"], + multiple=True, + strict=True, + default=["snap", "steam", "windows"], + ), +] + + +def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type): + # Create step for uploading msix to download.blender.org. + file_size_in_mb = 500 * 1024 * 1024 + worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + master_dest_path = pathlib.Path( + f"{worker_config.buildbot_download_folder}/{pipeline_type}" + ).expanduser() + + return plugins_steps.MultipleFileUpload( + name="deliver-binaries", + maxsize=file_size_in_mb, + workdir=f"{worker_source_path}", + glob=True, + workersrcs=["*.msix*"], + masterdest=f"{master_dest_path}", + mode=0o644, + url=None, + description="running", + descriptionDone="completed", + ) + + +def populate(devops_env_id): + builders = [] + schedulers = [] + + platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + local_worker_names = conf.machines.fetch_local_worker_names() + + worker_config = conf.worker.get_config(devops_env_id) + + needs_nightly_schedulers = devops_env_id == "PROD" + + pipeline_type = "daily" + + store_ids = ["steam", "snap", "windows"] + + print("*** Creating [code] [store] pipeline") + for track_id in track_ids: + triggerable_scheduler_names = [] + trigger_factory = buildbot.plugins.util.BuildFactory() + + for store_id in store_ids: + # Create build steps. 
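+            # Each store gets its own builder: snap and steam deliver from
+            # Linux workers, while the windows store build packages an .msix
+            # and uploads it to the buildbot download folder instead.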
+ pipeline_build_factory = buildbot.plugins.util.BuildFactory() + step_names = [ + "configure-machine", + "update-code", + "pull-artifacts", + "package", + ] + + if store_id == "windows": + step_names += ["deliver-binaries"] + else: + step_names += ["deliver"] + + step_names += ["clean"] + + print(f"Creating [{track_id}] [code] [store] [{store_id}] pipeline steps") + for step_name in step_names: + if step_name == "deliver-binaries": + step = create_deliver_binaries_windows_step( + worker_config, track_id, pipeline_type + ) + else: + args = ["--store-id", store_id, step_name] + step_command = pipeline.common.create_worker_command( + "code_store.py", devops_env_id, track_id, args + ) + + step = plugins_steps.ShellCommand( + name=step_name, + logEnviron=True, + haltOnFailure=True, + timeout=default_step_timeout_in_seconds, + description="running", + descriptionDone="completed", + command=step_command, + ) + + pipeline_build_factory.addStep(step) + + for master_step_name in pipeline.common.code_pipeline_master_step_names: + master_step_command = pipeline.common.create_master_command_args.withArgs( + devops_env_id, track_id, pipeline_type, master_step_name, single_platform=False + ) + + # Master to archive and purge builds + pipeline_build_factory.addStep( + plugins_steps.MasterShellCommand( + name=master_step_name, + logEnviron=False, + command=master_step_command, + description="running", + descriptionDone="completed", + ) + ) + + # Create builders. + worker_group_id = ( + f"windows-amd64-store-{store_id}" + if store_id == "windows" + else f"linux-x86_64-store-{store_id}" + ) + pipeline_worker_names = platform_worker_names[worker_group_id] + if pipeline_worker_names: + pipeline_builder_name = f"{track_id}-code-store-{store_id}" + + builder_tags = pipeline_builder_name.split("-") + + builders += [ + buildbot.plugins.util.BuilderConfig( + name=pipeline_builder_name, + workernames=pipeline_worker_names, + tags=builder_tags, + factory=pipeline_build_factory, + ) + ] + + scheduler_name = f"{track_id}-code-store-{store_id}-triggerable" + triggerable_scheduler_names += [scheduler_name] + + schedulers += [ + plugins_schedulers.Triggerable( + name=scheduler_name, builderNames=[pipeline_builder_name] + ) + ] + + # Create coordinator. 
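+        # A single Trigger step starts all per-store builders at once and
+        # waits for every one of them before the coordinator completes.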
+        if triggerable_scheduler_names:
+            trigger_properties = {}
+            trigger_factory.addStep(
+                plugins_steps.Trigger(
+                    schedulerNames=triggerable_scheduler_names,
+                    waitForFinish=True,
+                    updateSourceStamp=False,
+                    set_properties=trigger_properties,
+                    description="running",
+                    descriptionDone="completed",
+                )
+            )
+
+            coordinator_builder_name = f"{track_id}-code-store-coordinator"
+            builder_tags = coordinator_builder_name.split("-")
+
+            builders += [
+                buildbot.plugins.util.BuilderConfig(
+                    name=coordinator_builder_name,
+                    workernames=local_worker_names,
+                    tags=builder_tags,
+                    factory=trigger_factory,
+                )
+            ]
+
+            coordinator_scheduler_name = f"{track_id}-code-store-coordinator-force"
+            schedulers += [
+                plugins_schedulers.ForceScheduler(
+                    name=coordinator_scheduler_name,
+                    buttonName="Trigger store build",
+                    builderNames=[coordinator_builder_name],
+                    codebases=[
+                        buildbot.plugins.util.CodebaseParameter(
+                            codebase="", revision=None, hide=True
+                        )
+                    ],
+                    properties=scheduler_properties,
+                )
+            ]
+
+        if needs_nightly_schedulers and (track_id in track_ids):
+            nightly_scheduler_name = f"{track_id}-code-store-coordinator-nightly"
+            nightly_properties = {
+                "revision": "HEAD",
+            }
+            nightly_codebases = {
+                "blender.git": {
+                    "repository": "",
+                    "branch": tracked_branch_ids[track_id],
+                    "revision": None,
+                }
+            }
+            schedulers += [
+                plugins_schedulers.Nightly(
+                    name=nightly_scheduler_name,
+                    builderNames=[coordinator_builder_name],
+                    codebases=nightly_codebases,
+                    properties=nightly_properties,
+                    onlyIfChanged=False,
+                    hour=5,
+                    minute=30,
+                )
+            ]
+
+    return builders, schedulers
diff --git a/config/pipeline/common.py b/config/pipeline/common.py
new file mode 100644
index 0000000..dfd7124
--- /dev/null
+++ b/config/pipeline/common.py
@@ -0,0 +1,335 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import buildbot.plugins
+
+from buildbot.plugins import steps as plugins_steps
+from buildbot.plugins import schedulers as plugins_schedulers
+
+import conf.machines
+
+devops_git_root_path = "~/git"
+
+# Steps that run on the buildbot master.
+code_pipeline_master_step_names = [
+    "deduplicate-binaries",
+    "purge-binaries",
+]
+
+
+def fetch_property(props, key, default=None):
+    value = default
+    if key in props:
+        value = props[key]
+
+    return value
+
+
+def fetch_platform_architecture(props):
+    platform_architectures = fetch_property(props, key="platform_architectures")
+
+    # Find the platform arch for this builder
+    buildername = fetch_property(props, key="buildername")
+    builder_platform_architecture = "-".join(buildername.split("-")[-2:])
+
+    found_platform_architecture = None
+    if platform_architectures:
+        for platform_architecture in platform_architectures:
+            if platform_architecture in builder_platform_architecture:
+                found_platform_architecture = platform_architecture
+                break
+
+    if found_platform_architecture:
+        return found_platform_architecture.split("-")
+    else:
+        return None, None
+
+
+def always_do_step(step):
+    return True
+
+
+def needs_do_doc_pipeline_step(step):
+    if "package" in step.name or "deliver" in step.name:
+        return step.getProperty("needs_package_delivery")
+    else:
+        return True
+
+
+def create_worker_command(script, devops_env_id, track_id, args):
+    # This relative path assumes we are in:
+    # ~/.devops/services/buildbot-worker/<worker-name>/build
+    # There appears to be no way to expand a tilde here?
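+    #
+    # For a hypothetical track "v50" running the compile-code step of code.py
+    # in the PROD environment, this builds roughly:
+    #   python ../../../../../git/blender-devops/buildbot/worker/code.py \
+    #       --track-id v50 --service-env-id PROD ... compile-code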
+ # + # This is assumed to run within the buildbot worker pipenv, + # so the python command should match the python version and + # available packages. + cmd = [ + "python", + f"../../../../../git/blender-devops/buildbot/worker/{script}", + "--track-id", + track_id, + "--service-env-id", + devops_env_id, + ] + + return cmd + list(args) + + +@buildbot.plugins.util.renderer +def create_master_command_args( + props, devops_env_id, track_id, pipeline_type, step_name, single_platform +): + build_configuration = fetch_property(props, key="build_configuration", default="release") + python_module = fetch_property(props, key="python_module", default=False) + python_module_string = "true" if python_module else "false" + + args = [ + "--pipeline-type", + pipeline_type, + "--build-configuration", + build_configuration, + ] + + if single_platform: + # Archive binaries for a single architecture only? + platform_id, architecture = fetch_platform_architecture(props) + args += ["--platform-id", platform_id, "--architecture", architecture] + + if python_module: + args += ["--python-module"] + + args += [step_name] + + # This relative path assume were are in: + # ~/.devops/services/buildbot-master + # There appears to be no way to expand a tilde here? + # + # This is assumed to run within the buildbot master pipenv, + # so the python command should match the python version and + # available packages. + cmd = [ + "python", + "../../../git/blender-devops/buildbot/worker/archive.py", + "--track-id", + track_id, + "--service-env-id", + devops_env_id, + ] + + return cmd + list(args) + + +@buildbot.plugins.util.renderer +def create_pipeline_worker_command( + props, + devops_env_id, + track_id, + script, + step_name, + variation_property, + variation, + builder_properties, +): + args = [step_name] + + if variation_property: + args += ["--" + variation_property.replace("_", "-"), variation] + + for builder_prop in builder_properties: + if builder_prop.name in props: + prop_value = props[builder_prop.name] + else: + prop_value = builder_prop.default + + argument_name = "--" + builder_prop.name.replace("_", "-") + if isinstance(builder_prop, buildbot.plugins.util.BooleanParameter): + if prop_value in ["true", True]: + args += [argument_name] + else: + args += [argument_name, prop_value] + + if "revision" in props and props["revision"]: + args += ["--commit-id", props["revision"]] + + return create_worker_command(script, devops_env_id, track_id, args) + + +def create_pipeline( + devops_env_id, + artifact_id, + script, + steps, + tracked_branch_ids, + properties, + codebase, + worker_group_ids, + variation_property=None, + variations=[""], + incremental_properties=None, + nightly_properties=None, + do_step_if=always_do_step, + default_step_timeout_in_seconds=600, + tree_stable_timer_in_seconds=180, + hour=5, + minute=0, +): + builders = [] + schedulers = [] + + platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + local_worker_names = conf.machines.fetch_local_worker_names() + + needs_incremental_schedulers = incremental_properties is not None and devops_env_id in ["PROD"] + needs_nightly_schedulers = nightly_properties is not None and devops_env_id in ["PROD"] + track_ids = tracked_branch_ids.keys() + + print(f"*** Creating [{artifact_id}] pipeline") + for track_id in track_ids: + triggerable_scheduler_names = [] + trigger_factory = buildbot.plugins.util.BuildFactory() + + for worker_group_id, variation in zip(worker_group_ids, variations): + if variation: + pipeline_builder_name = 
f"{track_id}-{artifact_id}-{variation}" + else: + pipeline_builder_name = f"{track_id}-{artifact_id}" + + pipeline_build_factory = buildbot.plugins.util.BuildFactory() + + print(f"Creating [{pipeline_builder_name}] pipeline steps") + for step in steps: + if callable(step): + pipeline_build_factory.addStep(step()) + continue + + step_command = create_pipeline_worker_command.withArgs( + devops_env_id, + track_id, + script, + step, + variation_property, + variation, + properties, + ) + + pipeline_build_factory.addStep( + plugins_steps.ShellCommand( + name=step, + logEnviron=True, + haltOnFailure=True, + timeout=default_step_timeout_in_seconds, + description="running", + descriptionDone="completed", + command=step_command, + doStepIf=do_step_if, + ) + ) + + # Create builder. + pipeline_worker_names = platform_worker_names[worker_group_id] + if pipeline_worker_names: + builder_tags = pipeline_builder_name.split("-") + + builders += [ + buildbot.plugins.util.BuilderConfig( + name=pipeline_builder_name, + workernames=pipeline_worker_names, + tags=builder_tags, + factory=pipeline_build_factory, + ) + ] + + scheduler_name = f"{pipeline_builder_name}-triggerable" + triggerable_scheduler_names += [scheduler_name] + + schedulers += [ + plugins_schedulers.Triggerable( + name=scheduler_name, builderNames=[pipeline_builder_name] + ) + ] + + # Only create scheduler if we have something to to trigger + if triggerable_scheduler_names: + trigger_properties = {} + for property in properties: + if property != variation_property: + trigger_properties[property.name] = buildbot.plugins.util.Property( + property.name + ) + + trigger_factory.addStep( + plugins_steps.Trigger( + schedulerNames=triggerable_scheduler_names, + waitForFinish=True, + updateSourceStamp=False, + set_properties=trigger_properties, + description="running", + descriptionDone="completed", + ) + ) + + coordinator_builder_name = f"{track_id}-{artifact_id}-coordinator" + builder_tags = coordinator_builder_name.split("-") + builders += [ + buildbot.plugins.util.BuilderConfig( + name=coordinator_builder_name, + workernames=local_worker_names, + tags=builder_tags, + factory=trigger_factory, + ) + ] + + coordinator_scheduler_name = f"{track_id}-{artifact_id}-coordinator-force" + schedulers += [ + plugins_schedulers.ForceScheduler( + name=coordinator_scheduler_name, + buttonName="Trigger build", + builderNames=[coordinator_builder_name], + codebases=[ + buildbot.plugins.util.CodebaseParameter( + codebase="", revision=None, hide=True + ) + ], + properties=properties, + ) + ] + + if needs_incremental_schedulers and (track_id in track_ids): + incremental_scheduler_name = f"{track_id}-{artifact_id}-coordinator-incremental" + change_filter = buildbot.plugins.util.ChangeFilter( + project=[codebase], branch=tracked_branch_ids[track_id] + ) + schedulers += [ + plugins_schedulers.SingleBranchScheduler( + name=incremental_scheduler_name, + builderNames=[coordinator_builder_name], + change_filter=change_filter, + properties=incremental_properties, + treeStableTimer=tree_stable_timer_in_seconds, + ) + ] + + if needs_nightly_schedulers and (track_id in track_ids): + nightly_codebases = { + codebase: { + "repository": "", + "branch": tracked_branch_ids[track_id], + "revision": None, + } + } + nightly_scheduler_name = f"{track_id}-{artifact_id}-coordinator-nightly" + schedulers += [ + plugins_schedulers.Nightly( + name=nightly_scheduler_name, + builderNames=[coordinator_builder_name], + codebases=nightly_codebases, + properties=nightly_properties, + 
onlyIfChanged=False, + hour=hour, + minute=minute, + ) + ] + + return builders, schedulers diff --git a/config/pipeline/doc_api.py b/config/pipeline/doc_api.py new file mode 100644 index 0000000..09c1239 --- /dev/null +++ b/config/pipeline/doc_api.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import buildbot.plugins + +import conf.branches +import pipeline.common + + +def populate(devops_env_id): + properties = [ + buildbot.plugins.util.BooleanParameter( + name="needs_full_clean", + label="Full clean -> removes build workspace on machine", + required=True, + strict=True, + default=False, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_package_delivery", + label="Package delivery -> push build to configured services", + required=True, + strict=True, + default=False, + ), + ] + + return pipeline.common.create_pipeline( + devops_env_id, + "doc-api", + "doc_api.py", + [ + "configure-machine", + "update-code", + "compile-code", + "compile-install", + "compile", + "package", + "deliver", + "clean", + ], + conf.branches.code_tracked_branch_ids, + properties, + "blender.git", + ["linux-x86_64-general"], + variations=["html"], + incremental_properties={"needs_package_delivery": False}, + nightly_properties={"needs_package_delivery": True}, + tree_stable_timer_in_seconds=15 * 60, + do_step_if=pipeline.common.needs_do_doc_pipeline_step, + hour=1, + minute=30, + ) diff --git a/config/pipeline/doc_developer.py b/config/pipeline/doc_developer.py new file mode 100644 index 0000000..2333f98 --- /dev/null +++ b/config/pipeline/doc_developer.py @@ -0,0 +1,32 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import buildbot.plugins + +import pipeline.common + + +def populate(devops_env_id): + properties = [ + buildbot.plugins.util.BooleanParameter( + name="needs_package_delivery", + label="Package delivery -> push build to configured services", + required=True, + strict=True, + default=True, + ), + ] + + return pipeline.common.create_pipeline( + devops_env_id, + "doc-developer", + "doc_developer.py", + ["update", "compile", "deliver"], + {"vdev": "main"}, + properties, + "blender-developer-docs.git", + ["linux-x86_64-general"], + incremental_properties={"needs_package_delivery": True}, + do_step_if=pipeline.common.needs_do_doc_pipeline_step, + ) diff --git a/config/pipeline/doc_manual.py b/config/pipeline/doc_manual.py new file mode 100644 index 0000000..4cdd619 --- /dev/null +++ b/config/pipeline/doc_manual.py @@ -0,0 +1,44 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import buildbot.plugins + +import conf.branches +import pipeline.common + + +def populate(devops_env_id): + properties = [ + buildbot.plugins.util.BooleanParameter( + name="needs_package_delivery", + label="Package delivery -> push build to configured services", + required=True, + strict=True, + default=True, + ), + buildbot.plugins.util.BooleanParameter( + name="needs_all_locales", + label="All locales -> process all configure locales", + required=True, + strict=True, + default=False, + ), + ] + + return pipeline.common.create_pipeline( + devops_env_id, + "doc-manual", + "doc_manual.py", + ["configure-machine", "update", "compile", "package", "deliver", "clean"], + conf.branches.code_tracked_branch_ids, + properties, + "blender-manual.git", + ["linux-x86_64-general", "linux-x86_64-general"], + variation_property="doc_format", + 
variations=["html", "epub"],
+        incremental_properties={"needs_package_delivery": True, "needs_all_locales": False},
+        nightly_properties={"needs_package_delivery": True, "needs_all_locales": True},
+        tree_stable_timer_in_seconds=15 * 60,
+        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
+    )
diff --git a/config/pipeline/doc_studio.py b/config/pipeline/doc_studio.py
new file mode 100644
index 0000000..279b8f6
--- /dev/null
+++ b/config/pipeline/doc_studio.py
@@ -0,0 +1,32 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import buildbot.plugins
+
+import pipeline.common
+
+
+def populate(devops_env_id):
+    properties = [
+        buildbot.plugins.util.BooleanParameter(
+            name="needs_package_delivery",
+            label="Package delivery -> push build to configured services",
+            required=True,
+            strict=True,
+            default=True,
+        ),
+    ]
+
+    return pipeline.common.create_pipeline(
+        devops_env_id,
+        "doc-studio-tools",
+        "doc_studio.py",
+        ["update", "compile", "deliver"],
+        {"vdev": "main"},
+        properties,
+        "blender-studio-tools.git",
+        ["linux-x86_64-doc-studio-tools"],
+        incremental_properties={"needs_package_delivery": True},
+        do_step_if=pipeline.common.needs_do_doc_pipeline_step,
+    )
diff --git a/config/worker/__init__.py b/config/worker/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/config/worker/archive.py b/config/worker/archive.py
new file mode 100755
index 0000000..6cfcaed
--- /dev/null
+++ b/config/worker/archive.py
@@ -0,0 +1,346 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import argparse
+import datetime
+import os
+import pathlib
+import random
+import re
+import sys
+import time
+
+from collections import OrderedDict
+from typing import Any, Dict, List, Optional, Sequence, Union
+
+sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))
+
+import worker.utils
+
+package_file_pattern = re.compile(
+    r"^(?P<app_id>(blender|bpy))\-"
+    + r"(?P<version_id>[0-9]+\.[0-9]+\.[0-9]+)\-"
+    + r"(?P<risk_id>[a-z]+)\+"
+    + r"(?P<branch_id>[A-Za-z0-9_\-]+)\."
+    + r"(?P<hash>[a-fA-F0-9]+)\-"
+    + r"(?P<platform_id>[A-Za-z0-9_]+)\."
+    + r"(?P<architecture>[A-Za-z0-9_]+)\-"
+    + r"(?P<build_configuration>(release|asserts|sanitizer|debug))\."
+    + r"(?P<file_extension>[A-Za-z0-9\.]+)"
+)
+
+pipeline_types = ["daily", "experimental", "patch"]
+platforms = ["linux", "windows", "darwin"]
+architectures = ["x86_64", "amd64", "arm64"]
+build_configurations = ["release", "asserts", "sanitizer", "debug"]
+
+
+class ArchiveBuilder(worker.utils.Builder):
+    def __init__(self, args: argparse.Namespace):
+        super().__init__(args, "blender", "blender")
+        self.pipeline_type = args.pipeline_type
+        self.platform_id = args.platform_id
+        self.architecture = args.architecture
+        self.build_configuration = args.build_configuration
+        self.python_module = args.python_module
+        self.dry_run = args.dry_run
+        self.retention_in_days = args.retention_in_days
+
+
+def file_age_in_days(file_path: pathlib.Path) -> float:
+    try:
+        file_path_mtime = os.path.getmtime(file_path)
+    except OSError:
+        return 0.0
+
+    age_in_seconds = time.time() - file_path_mtime
+    return age_in_seconds / (3600.0 * 24.0)
+
+
+def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]:
+    file_name = file_path.name
+    matches = re.match(package_file_pattern, file_name)
+    if not matches:
+        return None
+    build_info: Dict[str, Union[str, float, pathlib.Path]] = dict(matches.groupdict())
+    build_info["file_age_in_days"] = file_age_in_days(file_path)
+    build_info["file_path"] = file_path
+    return build_info
+
+
+def archive_build(file_path: pathlib.Path, dry_run: bool) -> None:
+    # Archive build file itself and checksum
+    checksum_file_path = file_path.parent / (file_path.name + ".sha256")
+
+    for source_file_path in [file_path, checksum_file_path]:
+        if not source_file_path.exists():
+            continue
+
+        archive_path = source_file_path.parent / "archive"
+        os.makedirs(archive_path, exist_ok=True)
+        dest_file_path = archive_path / source_file_path.name
+
+        worker.utils.remove_file(dest_file_path, dry_run=dry_run)
+        worker.utils.move(source_file_path, dest_file_path, dry_run=dry_run)
+
+
+def fetch_current_builds(
+    builder: ArchiveBuilder,
+    pipeline_type: str,
+    short_version: Optional[str] = None,
+    all_platforms: bool = False,
+) -> Dict[Any, List[Any]]:
+    app_id = "bpy" if builder.python_module else "blender"
+
+    worker_config = builder.get_worker_config()
+    download_path = worker_config.buildbot_download_folder
+    pipeline_build_path = download_path / pipeline_type
+
+    print(f"Fetching current builds in [{pipeline_build_path}]")
+    build_groups: Dict[Any, List[Any]] = {}
+    for file_path in pipeline_build_path.glob("*.*"):
+        if not file_path.is_file():
+            continue
+        if file_path.name.endswith(".sha256"):
+            continue
+
+        build_info = parse_build_info(file_path)
+        if not build_info:
+            continue
+        if short_version and not build_info["version_id"].startswith(short_version + "."):
+            continue
+
+        if not all_platforms:
+            if builder.architecture and build_info["architecture"] != builder.architecture:
+                continue
+            if builder.platform_id and build_info["platform_id"] != builder.platform_id:
+                continue
+            if (
+                builder.build_configuration
+                and build_info["build_configuration"] != builder.build_configuration
+            ):
+                continue
+
+        if pipeline_type == "daily":
+            key = (
+                "daily",
+                build_info["file_extension"],
+                build_info["architecture"],
+                build_info["platform_id"],
+            )
+        else:
+            key = (
+                build_info["branch_id"],
+                build_info["file_extension"],
+                build_info["architecture"],
+                build_info["platform_id"],
+            )
+
+        if key in build_groups:
+            build_groups[key].append(build_info)
+        else:
+            build_groups[key] = [build_info]
+
+    return build_groups
+
+
+def archive_build_group(
+    builds: Sequence[Dict], retention_in_days: int, 
dry_run: bool = True +) -> None: + builds = sorted(builds, key=lambda build: build["file_age_in_days"]) + + for i, build in enumerate(builds): + build_age = build["file_age_in_days"] + build_name = build["file_path"].name + + # Only keep the most recent build if there are multiple + if i > 0 or build_age > retention_in_days: + print(f"Archiving [{build_name}] (age: {build_age:.3f} days)") + archive_build(build["file_path"], dry_run=dry_run) + else: + print(f"Keeping [{build_name}] (age: {build_age:.3f} days)") + + +def deduplicate(builder: ArchiveBuilder) -> None: + retention_in_days = builder.retention_in_days + dry_run = builder.dry_run + + # Get major.minor version to match. + short_version = "" + if builder.pipeline_type == "daily": + branches_config = builder.get_branches_config() + short_version = branches_config.track_major_minor_versions[builder.track_id] + + if not short_version: + raise BaseException(f"Missing version in [{builder.pipeline_type}] builds, aborting") + + build_groups = fetch_current_builds(builder, builder.pipeline_type, short_version=short_version) + + print( + f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] [{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]" + ) + for key, build_group in build_groups.items(): + print("") + print("--- Group: " + str(key)) + archive_build_group(build_group, retention_in_days, dry_run=dry_run) + + +def fetch_purge_builds( + builder: ArchiveBuilder, pipeline_type: str, folder: str +) -> Sequence[pathlib.Path]: + worker_config = builder.get_worker_config() + download_path = worker_config.buildbot_download_folder + archive_path = download_path / pipeline_type / folder + os.makedirs(archive_path, exist_ok=True) + + print(f"Fetching archived builds in [{archive_path}]") + builds = [] + for file_path in archive_path.glob("*.*"): + if not file_path.is_file(): + continue + if file_path.name.endswith(".sha256"): + continue + + builds.append(file_path) + + return builds + + +def purge(builder: ArchiveBuilder) -> None: + builds_retention_in_days = builder.retention_in_days + tests_retention_in_days = 10 + dry_run = builder.dry_run + + for pipeline_type in pipeline_types: + if pipeline_type != "daily": + print("=" * 120) + print(f"Deduplicating [{pipeline_type}] builds") + build_groups = fetch_current_builds(builder, pipeline_type, all_platforms=True) + for key, build_group in build_groups.items(): + print("") + print("--- Group: " + str(key)) + archive_build_group(build_group, builds_retention_in_days, dry_run=dry_run) + + print("=" * 120) + print(f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days") + for file_path in fetch_purge_builds(builder, pipeline_type, "archive"): + if file_age_in_days(file_path) < builds_retention_in_days: + continue + + age = file_age_in_days(file_path) + checksum_file_path = file_path.parent / (file_path.name + ".sha256") + + print(f"Deleting [{file_path.name}] (age: {age:.3f} days)") + worker.utils.remove_file(file_path, dry_run=dry_run) + worker.utils.remove_file(checksum_file_path, dry_run=dry_run) + + print("=" * 120) + print(f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days") + for file_path in fetch_purge_builds(builder, pipeline_type, "tests"): + if file_age_in_days(file_path) < tests_retention_in_days: + continue + + age = file_age_in_days(file_path) + checksum_file_path = file_path.parent / (file_path.name + ".sha256") + + print(f"Deleting [{file_path.name}] (age: {age:.3f} days)") + 
worker.utils.remove_file(file_path, dry_run=dry_run) + worker.utils.remove_file(checksum_file_path, dry_run=dry_run) + + +def generate_test_data(builder: ArchiveBuilder) -> None: + worker_config = builder.get_worker_config() + download_path = worker_config.buildbot_download_folder + + branches_config = builder.get_branches_config() + short_version = branches_config.track_major_minor_versions[builder.track_id] + version = short_version + ".0" + + app_id = "bpy" if builder.python_module else "blender" + commit_hashes = ["1ddf858", "03a2a53"] + risk_ids = ["stable", "alpha"] + file_extensions = ["zip", "msi"] + + if builder.pipeline_type == "daily": + versions = [short_version + ".0", short_version + ".1"] + branches = ["main", "v50"] + build_configurations = ["release"] + elif builder.pipeline_type == "patch": + versions = ["5.0.0", "7.0.0"] + branches = ["PR123", "PR456", "PR789"] + build_configurations = ["release", "debug"] + else: + versions = ["4.0.0", "6.0.0"] + branches = ["realtime-compositor", "cycles-x"] + build_configurations = ["release", "debug"] + + pipeline_path = download_path / builder.pipeline_type + os.makedirs(pipeline_path, exist_ok=True) + + for i in range(0, 25): + filename = ( + app_id + + "-" + + random.choice(versions) + + "-" + + random.choice(risk_ids) + + "+" + + random.choice(branches) + + "." + + random.choice(commit_hashes) + + "-" + + builder.platform_id + + "." + + builder.architecture + + "-" + + random.choice(build_configurations) + + "." + + random.choice(file_extensions) + ) + + file_path = pipeline_path / filename + file_path.write_text("Test") + + checksum_file_path = file_path.parent / (file_path.name + ".sha256") + checksum_file_path.write_text("Test") + + delta = datetime.timedelta(days=365 * random.random()) + filetime = time.mktime((datetime.datetime.today() - delta).timetuple()) + os.utime(file_path, (filetime, filetime)) + os.utime(checksum_file_path, (filetime, filetime)) + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["deduplicate-binaries"] = deduplicate + steps["purge-binaries"] = purge + + parser = worker.utils.create_argument_parser(steps=steps) + parser.add_argument( + "--pipeline-type", default="daily", type=str, choices=pipeline_types, required=False + ) + parser.add_argument("--platform-id", default="", type=str, choices=platforms, required=False) + parser.add_argument( + "--architecture", default="", type=str, choices=architectures, required=False + ) + parser.add_argument( + "--build-configuration", + default="release", + type=str, + choices=build_configurations, + required=False, + ) + parser.add_argument("--retention-in-days", default=100, type=int, required=False) + parser.add_argument("--python-module", action="store_true", required=False) + parser.add_argument("--dry-run", action="store_true", required=False) + parser.add_argument("--generate-test-data", action="store_true", required=False) + + args = parser.parse_args() + builder = ArchiveBuilder(args) + + if args.generate_test_data: + generate_test_data(builder) + + builder.run(args.step, steps) diff --git a/config/worker/blender/__init__.py b/config/worker/blender/__init__.py new file mode 100644 index 0000000..8b31b65 --- /dev/null +++ b/config/worker/blender/__init__.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import os +import pathlib +import re +import subprocess + +from collections import OrderedDict +from 
typing import Callable, Any + +import worker.utils + + +class CodeBuilder(worker.utils.Builder): + def __init__(self, args: argparse.Namespace): + super().__init__(args, "blender", "blender") + self.needs_full_clean = args.needs_full_clean + self.needs_gpu_binaries = args.needs_gpu_binaries + self.needs_gpu_tests = args.needs_gpu_tests + self.needs_ninja = True + self.python_module = args.python_module + self.build_configuration = args.build_configuration + + track_path: pathlib.Path = self.track_path + + if self.platform in {"darwin", "windows"}: + if len(args.architecture): + self.architecture = args.architecture + + if self.platform == "darwin": + self.build_dir = track_path / f"build_{self.architecture}_{self.build_configuration}" + else: + self.build_dir = track_path / f"build_{self.build_configuration}" + + self.blender_dir = track_path / "blender.git" + self.install_dir = track_path / f"install_{self.build_configuration}" + self.package_dir = track_path / "build_package" + self.build_doc_path = track_path / "build_doc_api" + + def clean(self): + worker.utils.remove_dir(self.install_dir) + worker.utils.remove_dir(self.package_dir) + worker.utils.remove_dir(self.build_doc_path) + + # Call command with in compiler environment. + def call(self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None) -> int: + cmd_prefix: worker.utils.CmdList = [] + + if self.platform == "darwin": + # On macOS, override Xcode version if requested. + pipeline_config = self.pipeline_config() + xcode = pipeline_config.get("xcode", None) + xcode_version = xcode.get("version", None) if xcode else None + + if xcode_version: + developer_dir = f"/Applications/Xcode-{xcode_version}.app/Contents/Developer" + else: + developer_dir = "/Applications/Xcode.app/Contents/Developer" + + if self.service_env_id == "LOCAL" and not pathlib.Path(developer_dir).exists(): + worker.utils.warning( + f"Skip using non-existent {developer_dir} in LOCAL service environment" + ) + else: + cmd_prefix = ["xcrun"] + env = dict(env) if env else os.environ.copy() + env["DEVELOPER_DIR"] = developer_dir + + elif worker.utils.is_tool("scl"): + pipeline_config = self.pipeline_config() + gcc_version = pipeline_config["gcc"]["version"] + gcc_major_version = gcc_version.split(".")[0] + + # On Rocky + if os.path.exists("/etc/rocky-release"): + # Stub to override configured GCC version, remove when blender build config is fixed + gcc_major_version = "11" + cmd_prefix = ["scl", "enable", f"gcc-toolset-{gcc_major_version}", "--"] + + return worker.utils.call(cmd_prefix + list(cmd), env=env) + + def pipeline_config(self) -> dict: + config_file_path = self.code_path / "build_files" / "config" / "pipeline_config.json" + if not config_file_path.exists(): + config_file_path = config_file_path.with_suffix(".yaml") + if not config_file_path.exists(): + raise Exception(f"Config file [{config_file_path}] not found, aborting") + + with open(config_file_path, "r") as read_file: + if config_file_path.suffix == ".json": + import json + + pipeline_config = json.load(read_file) + else: + import yaml + + pipeline_config = yaml.load(read_file, Loader=yaml.SafeLoader) + + return pipeline_config["buildbot"] + + def blender_command_path(self) -> pathlib.Path: + if self.platform == "darwin": + return self.install_dir / "Blender.app" / "Contents" / "macOS" / "Blender" + elif self.platform == "windows": + return self.install_dir / "blender.exe" + else: + return self.install_dir / "blender" + + def setup_build_environment(self) -> None: + if self.platform != 
"windows": + return + + # CMake goes first to avoid using chocolaty cpack command. + worker.utils.info("Setting CMake path") + os.environ["PATH"] = "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"] + + worker.utils.info("Setting VC Tools env variables") + windows_build_version = "10.0.19041.0" + os.environ["PATH"] = ( + f"C:\\Program Files (x86)\\Windows Kits\\10\\bin\\{windows_build_version}\\x64" + + os.pathsep + + os.environ["PATH"] + ) + os.environ["PATH"] = ( + "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + os.pathsep + os.environ["PATH"] + ) + + if self.architecture == "arm64": + vs_build_tool_path = pathlib.Path( + "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\VC\\Auxiliary\\Build\\vcvarsarm64.bat" + ) + vs_tool_install_dir_suffix = "\\bin\\Hostarm64\\arm64" + else: + vs_build_tool_path = pathlib.Path( + "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\Build\\vcvars64.bat" + ) + vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64" + + vcvars_output = subprocess.check_output([vs_build_tool_path, "&&", "set"], shell=True) + vcvars_text = vcvars_output.decode("utf-8", "ignore") + + for line in vcvars_text.splitlines(): + match = re.match(r"(.*?)=(.*)", line) + if match: + key = match.group(1) + value = match.group(2) + + if key not in os.environ: + if key not in ["PROMPT", "Path"]: + worker.utils.info(f"Adding key {key}={value}") + os.environ[key] = value + + os.environ["PATH"] = ( + os.environ["VCToolsInstallDir"] + + vs_tool_install_dir_suffix + + os.pathsep + + os.environ["PATH"] + ) + + +def create_argument_parser(steps: worker.utils.BuilderSteps) -> argparse.ArgumentParser: + parser = worker.utils.create_argument_parser(steps=steps) + parser.add_argument("--needs-full-clean", action="store_true", required=False) + parser.add_argument("--needs-gpu-binaries", action="store_true", required=False) + parser.add_argument("--needs-gpu-tests", action="store_true", required=False) + parser.add_argument("--python-module", action="store_true", required=False) + parser.add_argument( + "--build-configuration", + default="release", + type=str, + choices=["release", "asserts", "sanitizer", "debug"], + required=False, + ) + parser.add_argument( + "--architecture", + default="", + type=str, + choices=["arm64", "x86_64", "amd64"], + required=False, + ) + return parser diff --git a/config/worker/blender/benchmark.py b/config/worker/blender/benchmark.py new file mode 100644 index 0000000..5280e9b --- /dev/null +++ b/config/worker/blender/benchmark.py @@ -0,0 +1,125 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import json +import os +import pathlib +import urllib.request +import sys + +import conf.worker + +import worker.blender +import worker.utils + + + +def create_upload( + builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str +) -> None: + # Create package directory. 
+ branch = builder.branch_id.replace("blender-", "").replace("-release", "") + name = f"{branch}-{builder.platform}-{builder.architecture}" + package_dir = builder.package_dir / name + + worker.utils.remove_dir(package_dir) + os.makedirs(package_dir, exist_ok=True) + + # Fetch existing summary + worker_config = conf.worker.get_config(builder.service_env_id) + base_urls = { + "LOCAL": str(worker_config.buildbot_download_folder), + "UATEST": "https://builder.uatest.blender.org/download", + "PROD": "https://builder.blender.org/download", + } + base_url = base_urls[builder.service_env_id] + + summary_json_url = f"{base_url}/daily/benchmarks/{name}/summary.json" + summary_json_path = package_dir / "summary.json" + try: + if builder.service_env_id == "LOCAL": + worker.utils.copy_file(pathlib.Path(summary_json_url), summary_json_path) + else: + urllib.request.urlretrieve(summary_json_url, summary_json_path) + except Exception as e: + error_msg = str(e) + worker.utils.warning(f"Could not retrieve benchmark summary.json: {error_msg}") + + # Create json files in package directory. + results_json_path = benchmark_path / "results.json" + revision_json_path = package_dir / f"{revision}.json" + + worker.utils.copy_file(results_json_path, revision_json_path) + + summary_json = [] + if summary_json_path.exists(): + summary_json = json.loads(summary_json_path.read_text()) + summary_json += json.loads(results_json_path.read_text()) + summary_json_path.write_text(json.dumps(summary_json, indent=2)) + + # Create html file in package directory. + report_html_path = package_dir / "report.html" + cmd = [ + sys.executable, + builder.code_path / "tests" / "performance" / "benchmark.py", + "graph", + summary_json_path, + "-o", + report_html_path, + ] + worker.utils.call(cmd) + + +def benchmark(builder: worker.blender.CodeBuilder) -> None: + # Parameters + os.chdir(builder.code_path) + revision = worker.utils.check_output(["git", "rev-parse", "HEAD"]) + revision = revision[:12] + blender_command = builder.blender_command_path() + gpu_device = "METAL" if builder.platform == "darwin" else "OPTIX" + background = False if builder.platform == "darwin" else True + + worker.utils.info(f"Benchmark revision {revision}, GPU device {gpu_device}") + + # Create clean benchmark folder + benchmark_path = builder.track_path / "benchmark" / "default" + worker.utils.remove_dir(benchmark_path) + os.makedirs(benchmark_path, exist_ok=True) + + # Initialize configuration + config_py_path = benchmark_path / "config.py" + config_py_text = f""" +devices = ["CPU", "{gpu_device}_0"] +background = {background} +builds = {{"{revision}": "{blender_command}"}} +benchmark_type = "time_series" +""" + config_py_path.write_text(config_py_text) + + # Checkout benchmark files + tests_benchmarks_path = builder.code_path / "tests" / "benchmarks" + if not tests_benchmarks_path.exists(): + benchmarks_url = "https://projects.blender.org/blender/blender-benchmarks.git" + worker.utils.call(["git", "clone", benchmarks_url, tests_benchmarks_path]) + + # Run benchmark + cmd = [ + sys.executable, + builder.code_path / "tests" / "performance" / "benchmark.py", + "list", + ] + worker.utils.call(cmd) + + cmd = [ + sys.executable, + builder.code_path / "tests" / "performance" / "benchmark.py", + "run", + "default", + ] + exit_code = worker.utils.call(cmd, exit_on_error=False) + + # Write results to be uploaded + create_upload(builder, benchmark_path, revision) + + sys.exit(exit_code) diff --git a/config/worker/blender/blender.applescript 
b/config/worker/blender/blender.applescript
new file mode 100644
index 0000000..29b0c2c
--- /dev/null
+++ b/config/worker/blender/blender.applescript
@@ -0,0 +1,25 @@
+tell application "Finder"
+    tell disk "Blender"
+        log "applescript: opening [Blender]. This will seem to hang with a pop up dialog on applescript permissions for the first run. You have 10 minutes, get on machine now and push that button !!!"
+        with timeout of 600 seconds
+            open
+            log "applescript: yay it opened !"
+            log "applescript: setting current view"
+            set current view of container window to icon view
+            set toolbar visible of container window to false
+            set statusbar visible of container window to false
+            set the bounds of container window to {100, 100, 640, 472}
+            set theViewOptions to icon view options of container window
+            set arrangement of theViewOptions to not arranged
+            set icon size of theViewOptions to 128
+            set background picture of theViewOptions to file ".background:background.tif"
+            set position of item " " of container window to {400, 190}
+            set position of item "blender.app" of container window to {135, 190}
+            log "applescript: updating applications"
+            update without registering applications
+            delay 5
+            log "applescript: closing"
+            close
+        end timeout
+    end tell
+end tell
diff --git a/config/worker/blender/bundle_dmg.py b/config/worker/blender/bundle_dmg.py
new file mode 100644
index 0000000..cee3a33
--- /dev/null
+++ b/config/worker/blender/bundle_dmg.py
@@ -0,0 +1,473 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import os
+import re
+import time
+import subprocess
+import platform
+import pathlib
+import tempfile
+import typing
+
+import worker.utils
+
+# Extra size which is added on top of the actual file size when estimating the
+# size of the destination DMG.
+_extra_dmg_size_in_bytes = 800 * 1024 * 1024
+
+################################################################################
+# Common utilities
+
+
+def get_directory_size(root_directory: pathlib.Path) -> int:
+    """
+    Get size of directory on disk
+    """
+
+    total_size = 0
+    for file in root_directory.glob("**/*"):
+        total_size += file.lstat().st_size
+    return total_size
+
+
+################################################################################
+# DMG bundling specific logic
+
+
+def collect_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]:
+    """
+    Collect all app bundles which are to be put into DMG
+
+    If the source directory points to FOO.app it will be the only app bundle
+    packed.
+
+    Otherwise all .app bundles from the given directory are placed into a single
+    DMG.
+    """
+
+    if source_dir.name.endswith(".app"):
+        return [source_dir]
+
+    app_bundles = []
+    for filename in source_dir.glob("*"):
+        if not filename.is_dir():
+            continue
+        if not filename.name.endswith(".app"):
+            continue
+
+        app_bundles.append(filename)
+
+    return app_bundles
+
+
+def collect_and_log_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]:
+    app_bundles = collect_app_bundles(source_dir)
+
+    if not app_bundles:
+        worker.utils.info("No app bundles found for packing")
+        return []
+
+    worker.utils.info(f"Found {len(app_bundles)} to pack:")
+    for app_bundle in app_bundles:
+        worker.utils.info(f"- {app_bundle}")
+
+    return app_bundles
+
+
+def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int:
+    """
+    Estimate size of DMG to hold requested app bundles
+
+    The size is based on the actual size of all files in all bundles plus some
+    space to compensate for different size-on-disk plus some space to hold
+    codesign signatures.
+
+    It is better to be on the high side since the empty space is compressed, but
+    lack of space might cause silent failures later on.
+    """
+
+    app_bundles_size = 0
+    for app_bundle in app_bundles:
+        app_bundles_size += get_directory_size(app_bundle)
+
+    return app_bundles_size + _extra_dmg_size_in_bytes
+
+
+def copy_app_bundles(app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path) -> None:
+    """
+    Copy all bundles to a given directory
+
+    This directory is what the DMG will be created from.
+    """
+    for app_bundle in app_bundles:
+        destination_dir_path = dir_path / app_bundle.name
+
+        worker.utils.info(f"Copying app bundle [{app_bundle}] to [{dir_path}]")
+
+        worker.utils.copy_dir(app_bundle, destination_dir_path)
+
+        # Only chmod if we can't get cmake install to do it - james
+        # for r, d, f in os.walk(destination_dir_path):
+        #     worker.utils.info(f'chmoding [{r}] -> 0o755')
+        #     os.chmod(r, 0o755)
+
+
+def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path:
+    """
+    Get the main application bundle for the installation
+    """
+    return app_bundles[0]
+
+
+def create_dmg_image(
+    app_bundles: typing.List[pathlib.Path], dmg_file_path: pathlib.Path, volume_name: str
+) -> None:
+    """
+    Create DMG disk image and put app bundles in it
+
+    No DMG configuration or codesigning is happening here.
+    """
+    if dmg_file_path.exists():
+        worker.utils.info(f"Removing existing writable DMG {dmg_file_path}...")
+        worker.utils.remove_file(dmg_file_path)
+
+    temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-")
+    worker.utils.info(f"Preparing directory with app bundles for the DMG [{temp_content_path}]")
+    with temp_content_path as content_dir_str:
+        # Copy all bundles to a clean directory.
+        content_dir_path = pathlib.Path(content_dir_str)
+        # worker.utils.info(f'content_dir_path={content_dir_path}')
+        copy_app_bundles(app_bundles, content_dir_path)
+
+        # Estimate size of the DMG.
+        dmg_size = estimate_dmg_size(app_bundles)
+        worker.utils.info(f"Estimated DMG size: [{dmg_size:,}] bytes.")
+
+        # Create the DMG.
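+        # The image is created read-write (UDRW) and intentionally oversized;
+        # compress_dmg() later converts it to compressed UDZO, so the unused
+        # space does not end up in the final artifact.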
+        worker.utils.info(f"Creating writable DMG [{dmg_file_path}]")
+        command = (
+            "hdiutil",
+            "create",
+            "-size",
+            str(dmg_size),
+            "-fs",
+            "HFS+",
+            "-srcfolder",
+            content_dir_path,
+            "-volname",
+            volume_name,
+            "-format",
+            "UDRW",
+            "-mode",
+            "755",
+            dmg_file_path,
+        )
+
+        worker.utils.call(command)
+
+
+def get_writable_dmg_file_path(dmg_file_path: pathlib.Path) -> pathlib.Path:
+    """
+    Get file path for writable DMG image
+    """
+    parent = dmg_file_path.parent
+    return parent / (dmg_file_path.stem + "-temp.dmg")
+
+
+def mount_readwrite_dmg(dmg_file_path: pathlib.Path) -> None:
+    """
+    Mount writable DMG
+
+    Mounting point would be /Volumes/
+    """
+
+    worker.utils.info(f"Mounting read-write DMG {dmg_file_path}")
+    cmd: worker.utils.CmdSequence = [
+        "hdiutil",
+        "attach",
+        "-readwrite",
+        "-noverify",
+        "-noautoopen",
+        dmg_file_path,
+    ]
+    worker.utils.call(cmd)
+
+
+def get_mount_directory_for_volume_name(volume_name: str) -> pathlib.Path:
+    """
+    Get directory under which the volume will be mounted
+    """
+
+    return pathlib.Path("/Volumes") / volume_name
+
+
+def eject_volume(volume_name: str) -> None:
+    """
+    Eject given volume, if mounted
+    """
+    mount_directory = get_mount_directory_for_volume_name(volume_name)
+    if not mount_directory.exists():
+        return
+    mount_directory_str = str(mount_directory)
+
+    worker.utils.info(f"Ejecting volume [{volume_name}]")
+
+    # First try through Finder, as sometimes diskutil fails for unknown reasons.
+    command = [
+        "osascript",
+        "-e",
+        f"""tell application "Finder" to eject (every disk whose name is "{volume_name}")""",
+    ]
+    worker.utils.call(command)
+    if not mount_directory.exists():
+        return
+
+    # Figure out which device to eject.
+    mount_output = subprocess.check_output(["mount"]).decode()
+    device = ""
+    for line in mount_output.splitlines():
+        if f"on {mount_directory_str} (" not in line:
+            continue
+        tokens = line.split(" ", 3)
+        if len(tokens) < 3:
+            continue
+        if tokens[1] != "on":
+            continue
+        if device:
+            raise Exception(f"Multiple devices found for mounting point [{mount_directory}]")
+        device = tokens[0]
+
+    if not device:
+        raise Exception(f"No device found for mounting point [{mount_directory}]")
+
+    worker.utils.info(f"[{mount_directory}] is mounted as device [{device}], ejecting...")
+    command = ["diskutil", "eject", device]
+    worker.utils.call(command)
+
+
+def copy_background_if_needed(
+    background_image_file_path: pathlib.Path, mount_directory: pathlib.Path
+) -> None:
+    """
+    Copy background to the DMG
+
+    If the background image is not specified it will not be copied.
+    """
+
+    if not background_image_file_path:
+        worker.utils.info("No background image provided.")
+        return
+
+    destination_dir = mount_directory / ".background"
+    destination_dir.mkdir(exist_ok=True)
+
+    destination_file_path = destination_dir / background_image_file_path.name
+
+    worker.utils.info(
+        f"Copying background image [{background_image_file_path}] to [{destination_file_path}]"
+    )
+    worker.utils.copy_file(background_image_file_path, destination_file_path)
+
+
+def create_applications_link(mount_directory: pathlib.Path) -> None:
+    """
+    Create link to /Applications in the given location
+    """
+    worker.utils.info(f"Creating link to /Applications -> {mount_directory}")
+    target_path = mount_directory / " "
+    cmd: worker.utils.CmdSequence = ["ln", "-s", "/Applications", target_path]
+    worker.utils.call(cmd)
+
+
+def run_applescript_file_path(
+    applescript_file_path: pathlib.Path,
+    volume_name: str,
+    app_bundles: typing.List[pathlib.Path],
+    background_image_file_path: pathlib.Path,
+) -> None:
+    """
+    Run given applescript to adjust look and feel of the DMG
+    """
+    main_app_bundle = get_main_app_bundle(app_bundles)
+
+    architecture = platform.machine().lower()
+    # needs_run_applescript = (architecture != "x86_64")
+    needs_run_applescript = True
+
+    if not needs_run_applescript:
+        worker.utils.info(f"Having issues with apple script on [{architecture}], skipping !")
+        return
+
+    temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript")
+    with temp_script_file_path as temp_applescript_file:
+        worker.utils.info(
+            f"Adjusting applescript [{temp_script_file_path.name}] for volume name [{volume_name}]"
+        )
+        # Adjust script to the specific volume name.
+        with open(applescript_file_path, mode="r") as input_file:
+            worker.utils.info("Start script update")
+            for line in input_file.readlines():
+                stripped_line = line.strip()
+                if stripped_line.startswith("tell disk"):
+                    line = re.sub('tell disk ".*"', f'tell disk "{volume_name}"', line)
+                elif stripped_line.startswith("set background picture"):
+                    if not background_image_file_path:
+                        continue
+                    else:
+                        background_image_short = f".background:{background_image_file_path.name}"
+                        line = re.sub('to file ".*"', f'to file "{background_image_short}"', line)
+                line = line.replace("blender.app", main_app_bundle.name)
+                stripped_line = line.rstrip("\r\n")
+                worker.utils.info(f"line={stripped_line}")
+                temp_applescript_file.write(line)
+
+            temp_applescript_file.flush()
+            worker.utils.info("End script update")
+
+        # This does not help issues when running applescript
+        worker.utils.info("Updating permissions")
+        os.chmod(temp_script_file_path.name, 0o755)
+
+        # Setting flags to this applescript will fail execution, not permitted
+        # command = ['chflags', "uchg", temp_script_file_path.name]
+        # worker.utils.call(command)
+
+        command = ["osascript", "-s", "o", temp_script_file_path.name]
+        worker.utils.call(command)
+
+    # NOTE: This is copied from bundle.sh. The exact reason for the sleep
+    # remains a mystery.
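+    # A plausible explanation (unverified): Finder writes the window layout to
+    # the volume's .DS_Store asynchronously, so returning too early risks
+    # ejecting the volume before the layout has been saved.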
+    worker.utils.info("Waiting for applescript...")
+    time.sleep(5)
+
+
+def compress_dmg(writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path) -> None:
+    """
+    Compress temporary read-write DMG
+    """
+    cmd: worker.utils.CmdSequence = [
+        "hdiutil",
+        "convert",
+        writable_dmg_file_path,
+        "-format",
+        "UDZO",
+        "-o",
+        final_dmg_file_path,
+    ]
+
+    if final_dmg_file_path.exists():
+        worker.utils.info(f"Removing old compressed DMG [{final_dmg_file_path}]")
+        worker.utils.remove_file(final_dmg_file_path)
+
+    worker.utils.info("Compressing disk image...")
+    worker.utils.call(cmd)
+
+
+def create_final_dmg(
+    app_bundles: typing.List[pathlib.Path],
+    dmg_file_path: pathlib.Path,
+    background_image_file_path: pathlib.Path,
+    volume_name: str,
+    applescript_file_path: pathlib.Path,
+) -> None:
+    """
+    Create DMG with all app bundles
+
+    Will take care of configuring the background
+    """
+
+    worker.utils.info("Running all routines to create final DMG")
+
+    writable_dmg_file_path = get_writable_dmg_file_path(dmg_file_path)
+    worker.utils.info(f"Mounting volume [{volume_name}]")
+    mount_directory = get_mount_directory_for_volume_name(volume_name)
+    worker.utils.info(f"Mount at [{mount_directory}]")
+
+    # Make sure volume is not mounted.
+    # If it is mounted it will prevent removing old DMG files and could make
+    # it so app bundles are copied to the wrong place.
+    eject_volume(volume_name)
+
+    worker.utils.info(f"Creating image [{writable_dmg_file_path}] to [{volume_name}]")
+    create_dmg_image(app_bundles, writable_dmg_file_path, volume_name)
+
+    worker.utils.info(f"Mount r/w mode [{writable_dmg_file_path}]")
+    mount_readwrite_dmg(writable_dmg_file_path)
+
+    copy_background_if_needed(background_image_file_path, mount_directory)
+    create_applications_link(mount_directory)
+
+    run_applescript_file_path(
+        applescript_file_path, volume_name, app_bundles, background_image_file_path
+    )
+
+    eject_volume(volume_name)
+
+    compress_dmg(writable_dmg_file_path, dmg_file_path)
+    worker.utils.remove_file(writable_dmg_file_path)
+
+
+def ensure_dmg_extension(filepath: pathlib.Path) -> pathlib.Path:
+    """
+    Make sure the given file has a .dmg extension
+    """
+
+    if filepath.suffix != ".dmg":
+        return filepath.with_suffix(f"{filepath.suffix}.dmg")
+    return filepath
+
+
+def get_dmg_file_path(
+    requested_file_path: pathlib.Path, app_bundles: typing.List[pathlib.Path]
+) -> pathlib.Path:
+    """
+    Get full file path for the final DMG image
+
+    Will use the provided one when possible, otherwise will deduce it from
+    app bundles.
+
+    If the name is deduced, the DMG is stored in the current directory.
+    """
+
+    if requested_file_path:
+        return ensure_dmg_extension(requested_file_path.absolute())
+
+    # TODO(sergey): This is not necessarily the main one.
+    main_bundle = app_bundles[0]
+    # Strip .app from the name
+    return pathlib.Path(main_bundle.name[:-4] + ".dmg").absolute()
+
+
+def get_volume_name_from_dmg_file_path(dmg_file_path: pathlib.Path) -> str:
+    """
+    Deduce volume name from the DMG path
+
+    Will use the first part of the DMG file name prior to the dash.
+    """
+
+    tokens = dmg_file_path.stem.split("-")
+    words = tokens[0].split()
+
+    return " ".join(word.capitalize() for word in words)
+
+
+def bundle(
+    source_dir: pathlib.Path,
+    dmg_file_path: pathlib.Path,
+    applescript_file_path: pathlib.Path,
+    background_image_file_path: pathlib.Path,
+) -> None:
+    app_bundles = collect_and_log_app_bundles(source_dir)
+    for app_bundle in app_bundles:
+        worker.utils.info(f"App bundle path [{app_bundle}]")
+
+    dmg_file_path = get_dmg_file_path(dmg_file_path, app_bundles)
+    volume_name = get_volume_name_from_dmg_file_path(dmg_file_path)
+
+    worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]")
+
+    create_final_dmg(
+        app_bundles, dmg_file_path, background_image_file_path, volume_name, applescript_file_path
+    )
diff --git a/config/worker/blender/compile.py b/config/worker/blender/compile.py
new file mode 100644
index 0000000..07ff990
--- /dev/null
+++ b/config/worker/blender/compile.py
@@ -0,0 +1,534 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import multiprocessing
+import os
+import platform
+import pathlib
+
+from typing import Dict
+from pathlib import Path
+
+import worker.blender
+import worker.utils
+
+
+def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int:
+    """Fetch the ideal CPU count for the build process based on machine info"""
+    worker.utils.info(f"estimate_core_memory_in_mb={estimate_core_memory_in_mb}")
+
+    total_cpu_count = multiprocessing.cpu_count()
+    worker.utils.info(f"total_cpu_count={total_cpu_count}")
+
+    ideal_cpu_count = total_cpu_count
+    spare_cpu_count = 2
+
+    if platform.system().lower() != "darwin":
+        worker.utils.info(f"In current path {os.getcwd()}")
+        import psutil
+
+        virtual_memory = psutil.virtual_memory()
+        worker.utils.info(f"virtual_memory={virtual_memory}")
+
+        total_memory_in_bytes = virtual_memory.total
+        worker.utils.info(f"total_memory_in_bytes={total_memory_in_bytes}")
+
+        available_memory_in_bytes = virtual_memory.available
+        worker.utils.info(f"available_memory_in_bytes={available_memory_in_bytes}")
+
+        usable_memory_in_bytes = available_memory_in_bytes
+        worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}")
+
+        estimate_memory_per_core_in_bytes = estimate_core_memory_in_mb * 1024 * 1024
+        worker.utils.info(f"estimate_memory_per_core_in_bytes={estimate_memory_per_core_in_bytes}")
+
+        capable_cpu_count = int(total_memory_in_bytes / estimate_memory_per_core_in_bytes)
+        worker.utils.info(f"capable_cpu_count={capable_cpu_count}")
+
+        min_cpu_count = min(total_cpu_count, capable_cpu_count)
+        worker.utils.info(f"min_cpu_count={min_cpu_count}")
+
+        ideal_cpu_count = min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count
+        worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}")
+    return ideal_cpu_count
+
+
+def get_cmake_build_type(builder: worker.blender.CodeBuilder) -> str:
+    if builder.build_configuration == "debug":
+        return "Debug"
+    elif builder.build_configuration == "sanitizer":
+        # No reliable ASAN on Windows currently.
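+        # RelWithDebInfo keeps optimizations but retains the debug info that
+        # makes sanitizer reports readable; Windows falls back to Release.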
+        if builder.platform != "windows":
+            return "RelWithDebInfo"
+        else:
+            return "Release"
+    else:
+        return "Release"
+
+
+def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSequence:
+    needs_gtest_compile = not builder.python_module
+
+    with_gtests_state = "ON" if needs_gtest_compile else "OFF"
+    with_gpu_binaries_state = "ON" if builder.needs_gpu_binaries else "OFF"
+    with_gpu_tests = False
+
+    buildbotConfig = builder.pipeline_config()
+
+    # This is meant for stable build compilation
+    config_file_path = "build_files/cmake/config/blender_release.cmake"
+
+    platform_config_file_path = None
+    if builder.platform == "darwin":
+        platform_config_file_path = "build_files/buildbot/config/blender_macos.cmake"
+    elif builder.platform == "linux":
+        platform_config_file_path = "build_files/buildbot/config/blender_linux.cmake"
+    elif builder.platform == "windows":
+        platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake"
+
+    if platform_config_file_path:
+        worker.utils.info(f'Trying platform-specific buildbot configuration "{platform_config_file_path}"')
+        if (Path(builder.blender_dir) / platform_config_file_path).exists():
+            worker.utils.info(f'Using platform-specific buildbot configuration "{platform_config_file_path}"')
+            config_file_path = platform_config_file_path
+        else:
+            worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"')
+
+    # Must be first so that we can override some of the options found in the file
+    options = ["-C", os.path.join(builder.blender_dir, config_file_path)]
+
+    # Optional build as Python module.
+    if builder.python_module:
+        bpy_config_file_path = "build_files/cmake/config/bpy_module.cmake"
+        options += ["-C", os.path.join(builder.blender_dir, bpy_config_file_path)]
+        options += ["-DWITH_INSTALL_PORTABLE=ON"]
+
+    can_enable_oneapi_binaries = True
+    if builder.service_env_id != "PROD":
+        # UATEST machines are too slow currently.
+        worker.utils.info(f'Disabling oneAPI binaries on "{builder.service_env_id}"')
+        can_enable_oneapi_binaries = False
+    if builder.patch_id:
+        # Not enough throughput on the systems to cover AoT oneAPI binaries for patches.
+        worker.utils.info("Disabling oneAPI binaries for patch build")
+        can_enable_oneapi_binaries = False
+    if builder.track_id == "vexp":
+        # Only enable AoT oneAPI binaries for main and release branches.
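+        # Ahead-of-time (AoT) kernel compilation is the expensive part here;
+        # without it oneAPI devices can still JIT-compile kernels at runtime.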
+        worker.utils.info("Disabling oneAPI binaries for branch build")
+        can_enable_oneapi_binaries = False
+
+    # Add platform specific generator and configs
+    if builder.platform == "darwin":
+        if builder.needs_ninja:
+            options += ["-G", "Ninja"]
+        else:
+            options += ["-G", "Unix Makefiles"]
+
+        options += [f"-DCMAKE_OSX_ARCHITECTURES:STRING={builder.architecture}"]
+
+    elif builder.platform == "linux":
+        if builder.needs_ninja:
+            options += ["-G", "Ninja"]
+        else:
+            options += ["-G", "Unix Makefiles"]
+
+    elif builder.platform == "windows":
+        if builder.needs_ninja:
+            # set CC=%LLVM_DIR%\bin\clang-cl
+            # set CXX=%LLVM_DIR%\bin\clang-cl
+            # set CFLAGS=-m64 -fmsc-version=1922
+            # set CXXFLAGS=-m64 -fmsc-version=1922
+            vc_tools_install_dir = os.environ.get("VCToolsInstallDir")
+            if not vc_tools_install_dir:
+                raise Exception("Missing environment variable VCToolsInstallDir")
+
+            vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir)
+            if builder.architecture == "arm64":
+                compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe"
+                linker_file_path = "C:/Program Files/LLVM/bin/lld-link.exe"
+            else:
+                vs_tool_install_dir_suffix = "bin/Hostx64/x64"
+                compiler_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe")
+                linker_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe")
+
+            options += ["-G", "Ninja"]
+            # -DWITH_WINDOWS_SCCACHE=On
+            options += [
+                f"-DCMAKE_C_COMPILER:FILEPATH={compiler_file_path}",
+                f"-DCMAKE_CXX_COMPILER:FILEPATH={compiler_file_path}",
+            ]
+            # options += ["-DCMAKE_EXE_LINKER_FLAGS:STRING=/machine:x64"]
+            options += [f"-DCMAKE_LINKER:FILEPATH={linker_file_path}"]
+            # Skip the test, it does not work
+            options += ["-DCMAKE_C_COMPILER_WORKS=1"]
+            options += ["-DCMAKE_CXX_COMPILER_WORKS=1"]
+
+        else:
+            if builder.architecture == "arm64":
+                options += ["-G", "Visual Studio 17 2022", "-A", "arm64"]
+            else:
+                options += ["-G", "Visual Studio 16 2019", "-A", "x64"]
+
+    # Add configured overrides
+    platform_architecture = f"{builder.platform}-{builder.architecture}"
+
+    cmake_overrides: Dict[str, str] = {}
+    cmake_overrides.update(buildbotConfig["cmake"]["default"]["overrides"])
+    cmake_overrides.update(buildbotConfig["cmake"][platform_architecture]["overrides"])
+
+    # Disallow certain options
+    restricted_key_patterns = [
+        "POSTINSTALL_SCRIPT",
+        "OPTIX_",
+        "CMAKE_OSX_ARCHITECTURES",
+        "CMAKE_BUILD_TYPE",
+        "CMAKE_INSTALL_PREFIX",
+        "WITH_GTESTS",
+        "CUDA",
+        "WITH_CYCLES",
+        "CYCLES_CUDA",
+    ]
+
+    for cmake_key in cmake_overrides.keys():
+        for restricted_key_pattern in restricted_key_patterns:
+            if restricted_key_pattern in cmake_key:
+                raise Exception(f"CMake key [{cmake_key}] cannot be overridden, aborting")
+
+    for cmake_key, cmake_value in cmake_overrides.items():
+        options += [f"-D{cmake_key}={cmake_value}"]
+
+    cmake_build_type = get_cmake_build_type(builder)
+    options += [f"-DCMAKE_BUILD_TYPE:STRING={cmake_build_type}"]
+
+    if builder.build_configuration == "sanitizer":
+        # No reliable ASAN on Windows currently.
+        if builder.platform != "windows":
+            options += ["-DWITH_COMPILER_ASAN=ON"]
+            options += ["-DWITH_ASSERT_RELEASE=ON"]
+            # Avoid buildbot timeouts, see blender/blender#116635.
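+            # (Unity builds concatenate many translation units; with ASAN
+            # instrumentation on top they can compile for long enough to trip
+            # the per-step timeout.)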
+ options += ["-DWITH_UNITY_BUILD=OFF"] + elif builder.build_configuration == "asserts": + options += ["-DWITH_ASSERT_RELEASE=ON"] + + options += [f"-DCMAKE_INSTALL_PREFIX={builder.install_dir}"] + + options += ["-DWITH_INSTALL_COPYRIGHT=ON"] + + options += [f"-DWITH_GTESTS={with_gtests_state}"] + + if builder.platform == "windows": + if builder.architecture != "arm64": + # CUDA + HIP + oneAPI on Windows + options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"] + options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"] + if can_enable_oneapi_binaries: + options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"] + options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"] + else: + options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"] + if "hip" in buildbotConfig: + hip_version = buildbotConfig["hip"]["version"] + else: + hip_version = "5.2.21440" + if "ocloc" in buildbotConfig: + ocloc_version = buildbotConfig["ocloc"]["version"] + else: + ocloc_version = "dev_01" + options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"] + options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"] + options += [f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"] + elif builder.platform == "linux": + # CUDA on Linux + options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"] + options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"] + if can_enable_oneapi_binaries: + options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"] + options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"] + else: + options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"] + + # Directory changed to just /opt/rocm in 6.x + rocm_path = pathlib.Path("/opt/rocm/hip") + if not rocm_path.exists(): + rocm_path = pathlib.Path("/opt/rocm") + options += [f"-DHIP_ROOT_DIR:PATH={rocm_path}"] + + # GPU render tests support Linux + NVIDIA currently + if builder.needs_gpu_tests: + with_gpu_tests = True + if builder.needs_gpu_binaries: + options += ["-DCYCLES_TEST_DEVICES=CPU;OPTIX"] + elif builder.platform == "darwin": + # Metal on macOS + if builder.architecture == "arm64": + if builder.needs_gpu_tests: + with_gpu_tests = True + options += ["-DCYCLES_TEST_DEVICES=CPU;METAL"] + + if with_gpu_tests: + # Needs X11 or Wayland, and fails with xvfb to emulate X11. + # options += [f"-DWITH_GPU_DRAW_TESTS=ON"] + options += ["-DWITH_GPU_RENDER_TESTS=ON"] + options += ["-DWITH_GPU_RENDER_TESTS_SILENT=OFF"] + options += ["-DWITH_COMPOSITOR_REALTIME_TESTS=ON"] + + if "optix" in buildbotConfig: + optix_version = buildbotConfig["optix"]["version"] + + if builder.platform == "windows" and builder.architecture != "arm64": + options += [ + f"-DOPTIX_ROOT_DIR:PATH=C:/ProgramData/NVIDIA Corporation/OptiX SDK {optix_version}" + ] + elif builder.platform == "linux": + optix_base_dir = pathlib.Path.home() / ".devops" / "apps" + options += [ + f"-DOPTIX_ROOT_DIR:PATH={optix_base_dir}/NVIDIA-OptiX-SDK-{optix_version}-linux64-x86_64" + ] + + # Blender 4.3 has switched to pre-compiled HIP-RT libraries. 
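+    # Older pipeline configs may not carry a "hiprt" entry yet, hence the
+    # guard below; the same pattern is used for "optix" above.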
+    if "hiprt" in buildbotConfig:
+        hiprt_version = buildbotConfig["hiprt"]["version"]
+
+        if builder.platform == "windows" and builder.architecture != "arm64":
+            options += [
+                f"-DHIPRT_ROOT_DIR:PATH=C:/ProgramData/AMD/HIP/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
+            ]
+        elif builder.platform == "linux":
+            hiprt_base_dir = pathlib.Path.home() / ".devops" / "apps"
+            options += [
+                f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}"
+            ]
+
+    # Enable option to verify enabled libraries and features did not get disabled.
+    options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"]
+
+    needs_cuda_compile = builder.needs_gpu_binaries
+    if builder.needs_gpu_binaries:
+        try:
+            cuda10_version = buildbotConfig["cuda10"]["version"]
+        except KeyError:
+            cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"]
+
+        cuda10_folder_version = ".".join(cuda10_version.split(".")[:2])
+
+        try:
+            cuda11_version = buildbotConfig["cuda11"]["version"]
+        except KeyError:
+            cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"]
+
+        cuda11_folder_version = ".".join(cuda11_version.split(".")[:2])
+
+        try:
+            cuda12_version = buildbotConfig["cuda12"]["version"]
+            cuda12_folder_version = ".".join(cuda12_version.split(".")[:2])
+            have_cuda12 = True
+        except KeyError:
+            have_cuda12 = False
+
+        if builder.platform == "windows" and builder.architecture != "arm64":
+            # CUDA 10
+            cuda10_path = pathlib.Path(
+                f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda10_folder_version}"
+            )
+            if not cuda10_path.exists():
+                raise Exception(
+                    f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting"
+                )
+            cuda10_file_path = cuda10_path / "bin" / "nvcc.exe"
+
+            options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"]
+            options += [f"-DCUDA10_NVCC_EXECUTABLE:FILEPATH={cuda10_file_path}"]
+
+            # CUDA 11
+            cuda11_path = pathlib.Path(
+                f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda11_folder_version}"
+            )
+            if not cuda11_path.exists():
+                raise Exception(
+                    f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting"
+                )
+            cuda11_file_path = cuda11_path / "bin" / "nvcc.exe"
+
+            # CUDA 12
+            if have_cuda12:
+                cuda12_path = pathlib.Path(
+                    f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda12_folder_version}"
+                )
+                if not cuda12_path.exists():
+                    raise Exception(
+                        f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting"
+                    )
+                cuda12_file_path = cuda12_path / "bin" / "nvcc.exe"
+
+                options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
+                options += [f"-DCUDA11_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
+
+                options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"]
+                options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"]
+            else:
+                options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
+                options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
+
+        elif builder.platform == "linux":
+            # CUDA 10
+            cuda10_path = pathlib.Path(f"/usr/local/cuda-{cuda10_folder_version}")
+            if not cuda10_path.exists():
+                raise Exception(
+                    f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting"
+                )
+            cuda10_file_path = cuda10_path / "bin" / "nvcc"
+
+            # CUDA 11
+            cuda11_path = pathlib.Path(f"/usr/local/cuda-{cuda11_folder_version}")
+            if not cuda11_path.exists():
+                raise Exception(
+                    f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting"
+                )
+            cuda11_file_path = cuda11_path / "bin" / "nvcc"
+
+            # CUDA 12
+            if have_cuda12:
+                
cuda12_path = pathlib.Path(f"/usr/local/cuda-{cuda12_folder_version}")
+                if not cuda12_path.exists():
+                    raise Exception(
+                        f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting"
+                    )
+                cuda12_file_path = cuda12_path / "bin" / "nvcc"
+
+            # CUDA 10, must provide compatible host compiler.
+            options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"]
+
+            if pathlib.Path(
+                "/etc/rocky-release"
+            ).exists():  # We check for Rocky. Version 8 has GCC 8 in /usr/bin
+                options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
+                options += ["-DCUDA_HOST_COMPILER=/usr/bin/gcc"]
+            else:
+                # Use new CMake option.
+                options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"]
+                options += ["-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"]
+
+            # CUDA 11 or 12.
+            if have_cuda12:
+                options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
+                options += [f"-DCUDA11_NVCC_EXECUTABLE:STRING={cuda11_file_path}"]
+
+                options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"]
+                options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"]
+            else:
+                options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"]
+                options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"]
+
+    else:
+        worker.utils.info("Skipping gpu compilation as requested")
+
+    return options
+
+
+def clean_directories(builder: worker.blender.CodeBuilder) -> None:
+    worker.utils.info(f"Cleaning directory [{builder.install_dir}] from the previous run")
+    worker.utils.remove_dir(builder.install_dir)
+
+    os.makedirs(builder.build_dir, exist_ok=True)
+
+    worker.utils.info("Remove buildinfo files to re-generate them")
+    for build_info_file_name in (
+        "buildinfo.h",
+        "buildinfo.h.txt",
+    ):
+        full_path = builder.build_dir / "source" / "creator" / build_info_file_name
+        if full_path.exists():
+            worker.utils.info(f"Removing file [{full_path}]")
+            worker.utils.remove_file(full_path)
+
+
+def cmake_configure(builder: worker.blender.CodeBuilder) -> None:
+    cmake_cache_file_path = builder.build_dir / "CMakeCache.txt"
+    if cmake_cache_file_path.exists():
+        worker.utils.info("Removing CMake cache")
+        worker.utils.remove_file(cmake_cache_file_path)
+
+    worker.utils.info("CMake configure options")
+    cmake_options = get_cmake_options(builder)
+    cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(cmake_options)
+    builder.call(cmd)
+
+    # This hack does not work as expected: since the CMake cache is always
+    # updated, we end up recompiling on each compile step (code, gpu and install).
+    needs_cmake_cache_hack = False
+    if needs_cmake_cache_hack and pathlib.Path("/usr/lib64/libpthread.a").exists():
+        # HACK: The detection for lib pthread does not work on CentOS 7
+        worker.utils.warning(f"Hacking file [{cmake_cache_file_path}]")
+        tmp_cmake_cache_file_path = builder.build_dir / "CMakeCache.txt.tmp"
+        fin = open(cmake_cache_file_path)
+        fout = open(tmp_cmake_cache_file_path, "wt")
+        for line in fin:
+            # worker.utils.info(line)
+            if "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" in line:
+                worker.utils.warning(
+                    "Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] with [/usr/lib64/libpthread.a]"
+                )
+                line = line.replace(
+                    "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND",
+                    "OpenMP_pthread_LIBRARY:FILEPATH=/usr/lib64/libpthread.a",
+                )
+            fout.write(line)
+        fin.close()
+        fout.close()
+        worker.utils.warning(f"Updating [{cmake_cache_file_path}]")
+        os.replace(tmp_cmake_cache_file_path, cmake_cache_file_path)
+
+
+def cmake_build(builder: 
worker.blender.CodeBuilder, do_install: bool) -> None:
+    if builder.track_id in ["vdev", "v430"]:
+        if builder.platform == "windows":
+            estimate_gpu_memory_in_mb = 6000
+        else:
+            estimate_gpu_memory_in_mb = 4000
+    else:
+        estimate_gpu_memory_in_mb = 6000
+
+    estimate_core_memory_in_mb = estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000
+    ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb)
+
+    # Enable verbose building to make ninja output more often.
+    # It should help with slow build commands like oneAPI, and it helps
+    # troubleshooting when the compile-gpu step times out.
+    needs_verbose = builder.needs_gpu_binaries
+
+    build_type = get_cmake_build_type(builder)
+    cmd = ["cmake", "--build", builder.build_dir, "--config", build_type]
+    cmd += ["--parallel", f"{ideal_cpu_count}"]
+    if do_install:
+        cmd += ["--target", "install"]
+
+    if needs_verbose:
+        cmd += ["--verbose"]
+
+    builder.call(cmd)
+
+
+def compile_code(builder: worker.blender.CodeBuilder) -> None:
+    builder.needs_gpu_binaries = False
+    builder.setup_build_environment()
+    clean_directories(builder)
+    cmake_configure(builder)
+    cmake_build(builder, False)
+
+
+def compile_gpu(builder: worker.blender.CodeBuilder) -> None:
+    if builder.platform == "darwin":
+        worker.utils.info("Compile GPU not required on macOS")
+        return
+
+    builder.needs_gpu_binaries = True
+    builder.setup_build_environment()
+    cmake_configure(builder)
+    cmake_build(builder, False)
+
+
+def compile_install(builder: worker.blender.CodeBuilder) -> None:
+    builder.setup_build_environment()
+    cmake_configure(builder)
+    cmake_build(builder, True)
diff --git a/config/worker/blender/cpack_post.cmake b/config/worker/blender/cpack_post.cmake
new file mode 100644
index 0000000..ce44bef
--- /dev/null
+++ b/config/worker/blender/cpack_post.cmake
@@ -0,0 +1,34 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# This is a script which is used as POST-INSTALL one for regular CMake's
+# INSTALL target.
+#
+# It is used by buildbot workers to sign every binary which is going into
+# the final bundle.
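+#
+# The signing itself is delegated to cpack_post.py next to this script; this
+# wrapper only forwards CMAKE_INSTALL_PREFIX and turns a non-zero exit code
+# into a fatal install error.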
+# + +execute_process( + COMMAND python "${CMAKE_CURRENT_LIST_DIR}/cpack_post.py" "${CMAKE_INSTALL_PREFIX}" + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} + RESULT_VARIABLE exit_code +) + +if(NOT exit_code EQUAL "0") + message(FATAL_ERROR "Non-zero exit code of codesign tool") +endif() diff --git a/config/worker/blender/cpack_post.py b/config/worker/blender/cpack_post.py new file mode 100644 index 0000000..e08dbc6 --- /dev/null +++ b/config/worker/blender/cpack_post.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import sys + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent.parent)) + +import worker.blender.sign +import worker.utils + +path = pathlib.Path(sys.argv[1]).resolve() + +worker.blender.sign.sign_windows("PROD", path) + +if str(path).find("Unspecified") != -1: + print("Probably running with cpack command, adding Blender path") + blender_path = path.parent / "Blender" + worker.blender.sign.sign_windows("PROD", blender_path) + +print("Codesign for cpack is finished") + +# Only do this for zip +if str(path).find("ZIP") != -1: + new_path = path.parent / path.name.replace("-windows64", "") + package_file_path = new_path.parent / (new_path.name + ".zip") + + worker.utils.call(["7z", "a", "-tzip", package_file_path, path, "-r"]) + worker.utils.call(["7z", "rn", package_file_path, path.name, new_path.name]) diff --git a/config/worker/blender/lint.py b/config/worker/blender/lint.py new file mode 100644 index 0000000..5c0afcd --- /dev/null +++ b/config/worker/blender/lint.py @@ -0,0 +1,45 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import os +import sys + +import worker.blender +import worker.utils + + +def make_format(builder: worker.blender.CodeBuilder) -> bool: + os.chdir(builder.blender_dir) + + # Always run formatting with scripts from main, for security on unverified patches. + # TODO: how secure is this? How to test formatting issues in the scripts themselves? 
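+    # A possible mitigation (sketch, not enabled here): check the formatting
+    # scripts out from origin/main before running them, as the commented-out
+    # block below illustrates.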
+ # main_files = [makefile, "tools/utils_maintenance", "build_files/windows"] + # for main_file in main_files: + # worker.utils.call(['git', 'checkout', 'origin/main', '--', main_file]) + + # Run format + if builder.platform == "windows": + builder.call(["make.bat", "format"]) + else: + builder.call(["make", "-f", "GNUmakefile", "format"]) + + # Check for changes + diff = worker.utils.check_output(["git", "diff"]) + if len(diff) > 0: + print(diff) + + # Reset + worker.utils.call(["git", "checkout", "HEAD", "--", "."]) + + if len(diff) > 0: + worker.utils.error('Incorrect formatting detected, run "make format" to fix') + return False + + return True + + +def lint(builder: worker.blender.CodeBuilder) -> None: + ok = make_format(builder) + if not ok: + sys.exit(1) diff --git a/config/worker/blender/msix_package.py b/config/worker/blender/msix_package.py new file mode 100644 index 0000000..7941bb8 --- /dev/null +++ b/config/worker/blender/msix_package.py @@ -0,0 +1,114 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import zipfile + +import worker.utils + + +def pack( + # Version string in the form of 2.83.3.0, this is used in the Store package name + version: str, + # Input file path + input_file_path: pathlib.Path, + # A string in the form of 'CN=PUBLISHER' + publisher: str, + # If set this MSIX is for an LTS release + lts: bool = False, + # If set remove Content folder if it already exists + overwrite: bool = False, + # Don't actually execute commands + dry_run: bool = False, +) -> pathlib.Path: + LTSORNOT = "" + PACKAGETYPE = "" + if lts: + versionparts = version.split(".") + LTSORNOT = f" {versionparts[0]}.{versionparts[1]} LTS" + PACKAGETYPE = f"{versionparts[0]}.{versionparts[1]}LTS" + + output_package_file_name = f"{input_file_path.stem}.msix" + output_package_file_path = pathlib.Path(".", output_package_file_name) + content_folder = pathlib.Path(".", "Content") + content_blender_folder = pathlib.Path(content_folder, "Blender") + content_assets_folder = pathlib.Path(content_folder, "Assets") + assets_original_folder = pathlib.Path(".", "Assets") + + pri_config_file = pathlib.Path(".", "priconfig.xml") + pri_resources_file = pathlib.Path(content_folder, "resources.pri") + + pri_command = [ + "makepri", + "new", + "/pr", + f"{content_folder.absolute()}", + "/cf", + f"{pri_config_file.absolute()}", + "/of", + f"{pri_resources_file.absolute()}", + ] + + msix_command = [ + "makeappx", + "pack", + "/h", + "sha256", + "/d", + f"{content_folder.absolute()}", + "/p", + f"{output_package_file_path.absolute()}", + ] + + if overwrite: + if content_folder.joinpath("Assets").exists(): + worker.utils.remove_dir(content_folder) + content_folder.mkdir(exist_ok=True) + worker.utils.copy_dir(assets_original_folder, content_assets_folder) + + manifest_text = pathlib.Path("AppxManifest.xml.template").read_text() + manifest_text = manifest_text.replace("[VERSION]", version) + manifest_text = manifest_text.replace("[PUBLISHER]", publisher) + manifest_text = manifest_text.replace("[LTSORNOT]", LTSORNOT) + manifest_text = manifest_text.replace("[PACKAGETYPE]", PACKAGETYPE) + pathlib.Path(content_folder, "AppxManifest.xml").write_text(manifest_text) + + worker.utils.info( + f"Extracting files from [{input_file_path}] to [{content_blender_folder.absolute()}]" + ) + + # Extract the files from the ZIP archive, but skip the leading part of paths + # in the ZIP. 
We want to write the files to the content_blender_folder where
+    # blender.exe ends up as ./Content/Blender/blender.exe, and not
+    # ./Content/Blender/blender-2.83.3-windows64/blender.exe
+    with zipfile.ZipFile(input_file_path, "r") as blender_zip:
+        for entry in blender_zip.infolist():
+            if entry.is_dir():
+                continue
+            entry_location = pathlib.Path(entry.filename)
+            target_location = content_blender_folder.joinpath(*entry_location.parts[1:])
+            pathlib.Path(target_location.parent).mkdir(parents=True, exist_ok=True)
+            extracted_entry = blender_zip.read(entry)
+            target_location.write_bytes(extracted_entry)
+
+    worker.utils.info("... extraction complete.")
+
+    worker.utils.info("Generating Package Resource Index (PRI) file")
+    worker.utils.call(pri_command, dry_run=dry_run)
+
+    worker.utils.info(f"Creating MSIX package using command: {' '.join(msix_command)}")
+
+    # Remove MSIX file if it already exists. Otherwise the MakeAppX tool
+    # will hang.
+    worker.utils.remove_file(output_package_file_path)
+    worker.utils.call(msix_command, dry_run=dry_run)
+
+    if dry_run:
+        output_package_file_path.write_text("Dry run dummy package file")
+
+    worker.utils.remove_dir(content_folder)
+
+    worker.utils.info("Done.")
+
+    return output_package_file_path
diff --git a/config/worker/blender/pack.py b/config/worker/blender/pack.py
new file mode 100644
index 0000000..a39a6d8
--- /dev/null
+++ b/config/worker/blender/pack.py
@@ -0,0 +1,357 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+# Runs on buildbot worker, creating a release package using the build
+# system and zipping it into buildbot_upload.zip. This is then uploaded
+# to the master in the next buildbot step.
+
+import hashlib
+import json
+import os
+import sys
+import pathlib
+import tarfile
+
+import worker.blender
+import worker.utils
+
+import worker.blender.sign
+import worker.blender.bundle_dmg
+import worker.blender.version
+
+
+# SemVer based file naming
+def get_package_name(builder: worker.blender.CodeBuilder) -> str:
+    version_info = worker.blender.version.VersionInfo(builder)
+
+    # For release branch we will trim redundant info
+    branch_id = (
+        builder.branch_id.replace("/", "-")
+        .replace(".", "")
+        .replace("blender-", "")
+        .replace("-release", "")
+    )
+    package_name = "bpy" if builder.python_module else "blender"
+    package_name += f"-{version_info.version}"
+    package_name += f"-{version_info.risk_id}"
+    package_name += f"+{branch_id}"
+    if builder.patch_id:
+        if builder.patch_id.startswith("D"):
+            package_name += f"-{builder.patch_id}"
+        else:
+            package_name += f"-PR{builder.patch_id}"
+
+    package_name += f".{version_info.hash}"
+    package_name += f"-{builder.platform}"
+    package_name += f".{builder.architecture}"
+    package_name += f"-{builder.build_configuration}"
+
+    return package_name
+
+
+# Generate .sha256 file next to the package
+def generate_file_hash(package_file_path: pathlib.Path) -> None:
+    hash_algorithm = hashlib.sha256()
+
+    mem_array = bytearray(128 * 1024)
+    mem_view = memoryview(mem_array)
+    with open(package_file_path, "rb", buffering=0) as f:
+        while True:
+            # https://github.com/python/typeshed/issues/2166
+            n = f.readinto(mem_view)  # type: ignore
+            if not n:
+                break
+            hash_algorithm.update(mem_view[:n])
+
+    hash_file_path = (package_file_path.parent) / (package_file_path.name + ".sha256")
+    hash_text = hash_algorithm.hexdigest()
+    hash_file_path.write_text(hash_text)
+
+    worker.utils.info(f"Generated hash [{hash_file_path}]")
+    print(hash_text)
+
+
+# tar cf 
archive.tar test.c --owner=0 --group=0 +def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> None: + # One extra to remove leading os.sep when cleaning root for package_root + ln = len(str(src)) + 1 + flist = list() + + # Create list of tuples containing file and archive name + for root, dirs, files in os.walk(src): + package_root = os.path.join(package_name, root[ln:]) + flist.extend( + [(os.path.join(root, file), os.path.join(package_root, file)) for file in files] + ) + + # Set UID/GID of archived files to 0, otherwise they'd be owned by whatever + # user compiled the package. If root then unpacks it to /usr/local/ you get + # a security issue. + def _fakeroot(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo: + tarinfo.gid = 0 + tarinfo.gname = "root" + tarinfo.uid = 0 + tarinfo.uname = "root" + return tarinfo + + # Silence false positive mypy error. + package = tarfile.open(dest, "w:xz", preset=6) # type: ignore[call-arg] + for entry in flist: + worker.utils.info(f"Adding [{entry[0]}] to archive [{entry[1]}]") + package.add(entry[0], entry[1], recursive=False, filter=_fakeroot) + package.close() + + +def cleanup_files(dirpath: pathlib.Path, extension: str) -> None: + if dirpath.exists(): + for filename in os.listdir(dirpath): + filepath = pathlib.Path(os.path.join(dirpath, filename)) + if filepath.is_file() and filename.endswith(extension): + worker.utils.remove_file(filepath) + + +def pack_mac(builder: worker.blender.CodeBuilder) -> None: + version_info = worker.blender.version.VersionInfo(builder) + + os.chdir(builder.build_dir) + cleanup_files(builder.package_dir, ".dmg") + + package_name = get_package_name(builder) + package_file_name = package_name + ".dmg" + package_file_path = builder.package_dir / package_file_name + + applescript_file_path = pathlib.Path(__file__).parent.resolve() / "blender.applescript" + background_image_file_path = builder.blender_dir / "release" / "darwin" / "background.tif" + + worker.blender.bundle_dmg.bundle( + builder.install_dir, package_file_path, applescript_file_path, background_image_file_path + ) + + # Sign + worker.blender.sign.sign_darwin_files(builder, [package_file_path], "entitlements.plist") + + # Notarize + worker_config = builder.get_worker_config() + team_id = worker_config.sign_code_darwin_team_id + apple_id = worker_config.sign_code_darwin_apple_id + keychain_profile = worker_config.sign_code_darwin_keychain_profile + timeout = "30m" + + if builder.service_env_id == "LOCAL" and not apple_id: + worker.utils.info("Skipping notarization without Apple ID in local build") + return + + # Upload file and wait for completion. + notarize_cmd = [ + "xcrun", + "notarytool", + "submit", + package_file_path, + "--apple-id", + worker.utils.HiddenArgument(apple_id), + "--keychain-profile", + worker.utils.HiddenArgument(keychain_profile), + "--team-id", + worker.utils.HiddenArgument(team_id), + "--timeout", + timeout, + "--wait", + "--output-format", + "json", + ] + + request = worker.utils.check_output(notarize_cmd) + + request_data = json.loads(request) + request_id = request_data["id"] + request_status = request_data["status"] + + # Show logs + worker.utils.call( + ["xcrun", "notarytool", "log", "--keychain-profile", keychain_profile, request_id], + retry_count=5, + retry_wait_time=10.0, + ) + + # Failed? 
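+    # notarytool reports "Accepted" on success; any other status (for example
+    # "Invalid") means the submission was rejected and the log requested above
+    # should explain why.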
+    if request_status != "Accepted":
+        raise Exception("Notarization failed, aborting")
+
+    # Staple it
+    worker.utils.call(["xcrun", "stapler", "staple", package_file_path])
+
+    generate_file_hash(package_file_path)
+
+
+def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None:
+    os.chdir(builder.build_dir)
+
+    if pack_format == "msi":
+        cpack_type = "WIX"
+    else:
+        cpack_type = "ZIP"
+
+    package_extension = pack_format
+    cleanup_files(builder.package_dir, f".{package_extension}")
+
+    script_folder_path = pathlib.Path(os.path.realpath(__file__)).parent
+
+    # Will take care of codesigning and correct the folder name in zip
+    #
+    # Code signing is done as part of INSTALL target, which makes it possible to sign
+    # files which are aimed into a bundle and coming from a non-signed source (such as
+    # libraries SVN).
+    #
+    # This is achieved by specifying cpack_post.cmake as a post-install script run
+    # by cpack. cpack_post.py takes care of the actual code signing.
+    post_script_file_path = script_folder_path / "cpack_post.cmake"
+
+    app_id = "Blender"
+    final_package_name = get_package_name(builder)
+    # MSI needs the app id for the Windows menu folder name.
+    # It will fail if given anything else.
+    cpack_package_name = app_id if pack_format == "msi" else final_package_name
+
+    cmake_cmd = [
+        "cmake",
+        f"-DCPACK_PACKAGE_NAME:STRING={cpack_package_name}",
+        f"-DCPACK_OVERRIDE_PACKAGENAME:STRING={cpack_package_name}",
+        # Only works with ZIP, ignored by MSI
+        # f'-DARCHIVE_FILE:STRING={package_name}',
+        # f'-DCPACK_PACKAGE_FILE_NAME:STRING={cpack_package_name}',
+        f"-DCMAKE_INSTALL_PREFIX:PATH={builder.install_dir}",
+        f"-DPOSTINSTALL_SCRIPT:PATH={post_script_file_path}",
+        ".",
+    ]
+    builder.call(cmake_cmd)
+
+    worker.utils.info("Packaging Blender files")
+    cpack_cmd = [
+        "cpack",
+        "-G",
+        cpack_type,
+        # '--verbose',
+        "--trace-expand",
+        "-C",
+        builder.build_configuration,
+        "-B",
+        str(builder.package_dir),  # CPACK_PACKAGE_DIRECTORY
+        "-P",
+        cpack_package_name,
+    ]
+    builder.call(cpack_cmd)
+
+    final_package_file_name = f"{final_package_name}.{package_extension}"
+    final_package_file_path = builder.package_dir / final_package_file_name
+
+    # HACK: Rename files correctly, packages are appended `-windows64` with no option to rename
+    bogus_cpack_file_path = (
+        builder.package_dir / f"{cpack_package_name}-windows64.{package_extension}"
+    )
+
+    if pack_format == "zip":
+        if bogus_cpack_file_path.exists():
+            worker.utils.info(f"Removing bogus file [{bogus_cpack_file_path}]")
+            worker.utils.remove_file(bogus_cpack_file_path)
+
+        source_cpack_file_path = (
+            builder.package_dir
+            / "_CPack_Packages"
+            / "Windows"
+            / "ZIP"
+            / f"{final_package_file_name}"
+        )
+        worker.utils.info(f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]")
+        os.rename(source_cpack_file_path, final_package_file_path)
+    else:
+        os.rename(bogus_cpack_file_path, final_package_file_path)
+        version_info = worker.blender.version.VersionInfo(builder)
+        description = f"Blender {version_info.version}"
+        worker.blender.sign.sign_windows_files(builder.service_env_id, [final_package_file_path],
+                                               description=description)
+
+    generate_file_hash(final_package_file_path)
+
+
+def pack_linux(builder: worker.blender.CodeBuilder) -> None:
+    blender_executable = builder.install_dir / "blender"
+
+    version_info = worker.blender.version.VersionInfo(builder)
+
+    # Strip all unused symbols from the binaries
+    worker.utils.info("Stripping binaries")
+    builder.call(["strip", "--strip-all", blender_executable])
+
+    
+ worker.utils.info("Stripping python")
+
+ # This should work for 3.0, but for now the folder is still named 3.00.
+ py_target = builder.install_dir / version_info.short_version
+ if not os.path.exists(py_target):
+ # Support the older format and the current issue with 3.00.
+ py_target = builder.install_dir / ("%d.%02d" % (version_info.major, version_info.minor))
+
+ worker.utils.call(["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"])
+
+ package_name = get_package_name(builder)
+ package_file_name = f"{package_name}.tar.xz"
+ package_file_path = builder.package_dir / package_file_name
+
+ worker.utils.info(f"Creating [{package_file_path}] archive")
+
+ os.makedirs(builder.package_dir, exist_ok=True)
+
+ create_tar_xz(builder.install_dir, package_file_path, package_name)
+
+ generate_file_hash(package_file_path)
+
+
+def pack_python_module(builder: worker.blender.CodeBuilder) -> None:
+ cleanup_files(builder.package_dir, ".whl")
+ cleanup_files(builder.package_dir, ".zip")
+
+ package_name = get_package_name(builder) + ".zip"
+ package_filepath = builder.package_dir / package_name
+ pack_script = builder.blender_dir / "build_files" / "utils" / "make_bpy_wheel.py"
+
+ # Make wheel
+ worker.utils.info("Packaging Python Wheel")
+ cmd = [sys.executable, pack_script, builder.install_dir]
+ cmd += ["--build-dir", builder.build_dir]
+ cmd += ["--output-dir", builder.package_dir]
+ builder.call(cmd)
+
+ # Pack the wheel in a zip, until the pipeline and www can deal with .whl files.
+ import zipfile
+
+ with zipfile.ZipFile(package_filepath, "w") as zipf:
+ for whl_name in os.listdir(builder.package_dir):
+ if whl_name.endswith(".whl"):
+ whl_filepath = builder.package_dir / whl_name
+ zipf.write(whl_filepath, arcname=whl_name)
+
+ cleanup_files(builder.package_dir, ".whl")
+
+ generate_file_hash(package_filepath)
+
+
+def pack(builder: worker.blender.CodeBuilder) -> None:
+ builder.setup_build_environment()
+
+ # Create clean package directory
+ worker.utils.remove_dir(builder.package_dir)
+ os.makedirs(builder.package_dir, exist_ok=True)
+
+ # Make sure install directory always exists
+ os.makedirs(builder.install_dir, exist_ok=True)
+
+ if builder.python_module:
+ pack_python_module(builder)
+ elif builder.platform == "darwin":
+ pack_mac(builder)
+ elif builder.platform == "windows":
+ pack_win(builder, "zip")
+ if builder.track_id not in ["vdev", "vexp"]:
+ pack_win(builder, "msi")
+ elif builder.platform == "linux":
+ pack_linux(builder)
diff --git a/config/worker/blender/sign.py b/config/worker/blender/sign.py
new file mode 100644
index 0000000..9746c00
--- /dev/null
+++ b/config/worker/blender/sign.py
@@ -0,0 +1,195 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import pathlib
+import sys
+
+from typing import Optional, Sequence
+
+import worker.blender
+import worker.utils
+
+
+def sign_windows_files(
+ service_env_id: str,
+ file_paths: Sequence[pathlib.Path],
+ description: Optional[str] = None,
+ certificate_id: str = "",
+) -> None:
+ import conf.worker
+
+ worker_config = conf.worker.get_config(service_env_id)
+
+ # TODO: Rotate through them if the first one fails.
+ timeserver = worker_config.sign_code_windows_time_servers[0]
+ server_url = worker_config.sign_code_windows_server_url
+ if not certificate_id:
+ certificate_id = worker_config.sign_code_windows_certificate
+
+ dry_run = False
+ if service_env_id == "LOCAL" and not certificate_id:
+ worker.utils.warning("Performing dry run on LOCAL service environment")
+ dry_run = True
+
+ cmd_args = [
+
sys.executable, + "C:\\tools\\codesign.py", + "--server-url", + worker.utils.HiddenArgument(server_url), + ] + if description: + cmd_args += ["--description", description] + + cmd: worker.utils.CmdSequence = cmd_args + + # Signing one file at a time causes a stampede on servers, resulting in blocking. + # Instead sign in chunks of multiple files. + chunk_size = 25 # Sign how many files at a time + retry_count = 3 + + for i in range(0, len(file_paths), chunk_size): + file_chunks = file_paths[i : i + chunk_size] + worker.utils.call(list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run) + + +def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None: + # TODO: Why use a junction? Is there some failure with long file paths? + # worker.utils.info("Creating building link") + # temp_build_root_path = pathlib.Path("C:/BlenderTemp") + # os.makedirs(temp_build_root_path, exist_ok=True) + # orig_install_path = install_path + # install_path = temp_build_root_path / install_path.name + + try: + # TODO + # New-Item -type Junction -path install_path -value orig_install_path + + worker.utils.info("Collecting files to process") + file_paths = list(install_path.glob("*.exe")) + file_paths += list(install_path.glob("*.dll")) + file_paths += list(install_path.glob("*.pyd")) + file_paths = [f for f in file_paths if str(f).find("blender.crt") == -1] + for f in file_paths: + print(f) + + sign_windows_files(service_env_id, file_paths) + finally: + # worker.utils.info(f"Removing temporary folder {temp_build_root_path}") + # worker.utils.remove_dir(temp_build_root_path, retry_count=5, retry_wait_time=5.0) + + # TODO: is this really necessary? + # worker.utils.info("Flushing volume cache...") + # Write-VolumeCache -DriveLetter C + + # core_shell_retry_command -retry_count 5 -delay_in_milliseconds 1000 -script_block ` + # worker.utils.info("Junction information...") + # junction = Get-Item -Path install_path + # worker.utils.info(junction | Format-Table) + # worker.utils.info("Attempting to remove...") + # junction.Delete() + # worker.utils.info("Junction deleted!") + pass + + worker.utils.info("End of codesign steps") + + +def sign_darwin_files( + builder: worker.blender.CodeBuilder, + file_paths: Sequence[pathlib.Path], + entitlements_file_name: str +) -> None: + entitlements_path = builder.code_path / "release" / "darwin" / entitlements_file_name + + if not entitlements_path.exists(): + raise Exception(f"File {entitlements_path} not found, aborting") + + worker_config = builder.get_worker_config() + certificate_id = worker_config.sign_code_darwin_certificate + + dry_run = False + if builder.service_env_id == "LOCAL" and not certificate_id: + worker.utils.warning("Performing dry run on LOCAL service environment") + dry_run = True + + keychain_password = worker_config.darwin_keychain_password(builder.service_env_id) + cmd: worker.utils.CmdSequence = [ + "security", + "unlock-keychain", + "-p", + worker.utils.HiddenArgument(keychain_password), + ] + worker.utils.call(cmd, dry_run=dry_run) + + for file_path in file_paths: + if file_path.is_dir() and file_path.suffix != ".app": + continue + + # Remove signature + if file_path.suffix != ".dmg": + worker.utils.call( + ["codesign", "--remove-signature", file_path], exit_on_error=False, dry_run=dry_run + ) + + # Add signature + worker.utils.call( + [ + "codesign", + "--force", + "--timestamp", + "--options", + "runtime", + f"--entitlements={entitlements_path}", + "--sign", + certificate_id, + file_path, + ], + retry_count=3, + 
dry_run=dry_run,
+ )
+ if file_path.suffix == ".app":
+ worker.utils.info(f"Validating app bundle {file_path}")
+ worker.utils.call(
+ ["codesign", "-vvv", "--deep", "--strict", file_path], dry_run=dry_run
+ )
+
+
+def sign_darwin(builder: worker.blender.CodeBuilder) -> None:
+ bundle_path = builder.install_dir / "Blender.app"
+
+ # Executables
+ sign_path = bundle_path / "Contents" / "MacOS"
+ worker.utils.info(f"Collecting files to process in {sign_path}")
+ sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist")
+
+ # Thumbnailer app extension.
+ thumbnailer_appex_path = bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex"
+ if thumbnailer_appex_path.exists():
+ sign_path = thumbnailer_appex_path / "Contents" / "MacOS"
+ worker.utils.info(f"Collecting files to process in {sign_path}")
+ sign_darwin_files(builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist")
+
+ # Shared libraries and Python
+ sign_path = bundle_path / "Contents" / "Resources"
+ worker.utils.info(f"Collecting files to process in {sign_path}")
+ file_paths = list(
+ set(sign_path.rglob("*.dylib"))
+ | set(sign_path.rglob("*.so"))
+ | set(sign_path.rglob("python3.*"))
+ )
+ sign_darwin_files(builder, file_paths, "entitlements.plist")
+
+ # Bundle
+ worker.utils.info(f"Signing app bundle {bundle_path}")
+ sign_darwin_files(builder, [bundle_path], "entitlements.plist")
+
+
+def sign(builder: worker.blender.CodeBuilder) -> None:
+ builder.setup_build_environment()
+
+ if builder.platform == "windows":
+ sign_windows(builder.service_env_id, builder.install_dir)
+ elif builder.platform == "darwin":
+ sign_darwin(builder)
+ else:
+ worker.utils.info("No code signing to be done on this platform")
diff --git a/config/worker/blender/test.py b/config/worker/blender/test.py
new file mode 100644
index 0000000..569f909
--- /dev/null
+++ b/config/worker/blender/test.py
@@ -0,0 +1,60 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import os
+import shutil
+
+from typing import List
+
+import worker.blender
+import worker.blender.pack
+import worker.blender.compile
+
+
+def get_ctest_arguments(builder: worker.blender.CodeBuilder) -> List[str]:
+ args = ["--output-on-failure"]
+
+ # GPU tests are currently slow and can cause timeouts.
+ if not builder.needs_gpu_tests:
+ args += ["--parallel", "4"]
+
+ args += ["-C", worker.blender.compile.get_cmake_build_type(builder)]
+ return args
+
+
+def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> None:
+ build_tests_dir = builder.build_dir / "tests"
+ package_tests_dir = builder.package_dir / "tests"
+ if not build_tests_dir.exists():
+ return
+
+ os.makedirs(package_tests_dir, exist_ok=True)
+
+ # Upload package on failure
+ if not success:
+ package_filename = "tests-" + worker.blender.pack.get_package_name(builder)
+ package_filepath = package_tests_dir / package_filename
+ shutil.copytree(build_tests_dir, package_filepath)
+ shutil.make_archive(str(package_filepath), "zip", package_tests_dir, package_filename)
+ shutil.rmtree(package_filepath)
+
+ # Always upload the unpacked folder for main and release tracks
+ # when using GPU tests. This is useful for debugging GPU
+ # differences.
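+ # Illustrative example (assumed branch naming): a branch_id such as
+ # "blender-v4.2-release" becomes "v4.2", giving a folder name like
+ # "v4.2-linux-x86_64".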
+ if builder.track_id != "vexp" and builder.needs_gpu_tests: + branch = builder.branch_id.replace("blender-", "").replace("-release", "") + name = f"{branch}-{builder.platform}-{builder.architecture}" + shutil.copytree(build_tests_dir, package_tests_dir / name) + + +def test(builder: worker.blender.CodeBuilder) -> None: + builder.setup_build_environment() + os.chdir(builder.build_dir) + success = False + + try: + builder.call(["ctest"] + get_ctest_arguments(builder)) + success = True + finally: + package_for_upload(builder, success) diff --git a/config/worker/blender/update.py b/config/worker/blender/update.py new file mode 100644 index 0000000..cb5909d --- /dev/null +++ b/config/worker/blender/update.py @@ -0,0 +1,53 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import os +import sys + +import worker.blender +import worker.utils + + +def _clean_folders(builder: worker.blender.CodeBuilder) -> None: + # Delete build folders. + if builder.needs_full_clean: + worker.utils.remove_dir(builder.build_dir) + else: + worker.utils.remove_dir(builder.build_dir / "Testing") + worker.utils.remove_dir(builder.build_dir / "bin" / "tests") + + # Delete install and packaging folders + worker.utils.remove_dir(builder.install_dir) + worker.utils.remove_dir(builder.package_dir) + + +def update(builder: worker.blender.CodeBuilder) -> None: + _clean_folders(builder) + + builder.update_source() + os.chdir(builder.code_path) + + make_update_path = builder.code_path / "build_files" / "utils" / "make_update.py" + + make_update_text = make_update_path.read_text() + if "def svn_update" in make_update_text: + worker.utils.error("Can't build branch or pull request that uses Subversion libraries.") + worker.utils.error("Merge with latest main or release branch to use Git LFS libraries.") + sys.exit(1) + + # Run make update + cmd = [ + sys.executable, + make_update_path, + "--no-blender", + "--use-linux-libraries", + "--use-tests", + "--architecture", + builder.architecture, + ] + + if builder.track_id not in ("v360", "vexp"): + cmd += ["--prune-destructive"] + + worker.utils.call(cmd) diff --git a/config/worker/blender/version.py b/config/worker/blender/version.py new file mode 100644 index 0000000..24e7fca --- /dev/null +++ b/config/worker/blender/version.py @@ -0,0 +1,52 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import re + + +import worker.blender + + +class VersionInfo: + def __init__(self, builder: worker.blender.CodeBuilder): + # Get version information + buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h" + blender_h = ( + builder.blender_dir / "source" / "blender" / "blenkernel" / "BKE_blender_version.h" + ) + + version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION")) + + version_number_patch = int(self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH")) + self.major, self.minor, self.patch = ( + version_number // 100, + version_number % 100, + version_number_patch, + ) + + if self.major >= 3: + self.short_version = "%d.%d" % (self.major, self.minor) + self.version = "%d.%d.%d" % (self.major, self.minor, self.patch) + else: + self.short_version = "%d.%02d" % (self.major, self.minor) + self.version = "%d.%02d.%d" % (self.major, self.minor, self.patch) + + self.version_cycle = self._parse_header_file(blender_h, "BLENDER_VERSION_CYCLE") + if buildinfo_h.exists(): + self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1] + 
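+ # BUILD_HASH is presumably defined with surrounding quotes in buildinfo.h,
+ # hence the [1:-1] above to strip them.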
else:
+ self.hash = ""
+ self.risk_id = self.version_cycle.replace("release", "stable").replace("rc", "candidate")
+ self.is_development_build = self.version_cycle == "alpha"
+
+ def _parse_header_file(self, filename: pathlib.Path, define: str) -> str:
+ regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define)
+ with open(filename, "r") as file:
+ for line in file:
+ match = regex.match(line)
+ if match:
+ return match.group(1)
+
+ raise Exception(f"Failed to parse {filename.name} header for {define}")
diff --git a/config/worker/code.py b/config/worker/code.py
new file mode 100755
index 0000000..36833ea
--- /dev/null
+++ b/config/worker/code.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import pathlib
+import sys
+
+from collections import OrderedDict
+
+sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))
+
+import worker.configure
+import worker.utils
+
+import worker.blender.update
+import worker.blender.lint
+import worker.blender.compile
+import worker.blender.test
+import worker.blender.sign
+import worker.blender.pack
+
+
+if __name__ == "__main__":
+ steps: worker.utils.BuilderSteps = OrderedDict()
+ steps["configure-machine"] = worker.configure.configure_machine
+ steps["update-code"] = worker.blender.update.update
+ steps["lint-code"] = worker.blender.lint.lint
+ steps["compile-code"] = worker.blender.compile.compile_code
+ steps["compile-gpu"] = worker.blender.compile.compile_gpu
+ steps["compile-install"] = worker.blender.compile.compile_install
+ steps["test-code"] = worker.blender.test.test
+ steps["sign-code-binaries"] = worker.blender.sign.sign
+ steps["package-code-binaries"] = worker.blender.pack.pack
+ steps["clean"] = worker.blender.CodeBuilder.clean
+
+ parser = worker.blender.create_argument_parser(steps=steps)
+
+ args = parser.parse_args()
+ builder = worker.blender.CodeBuilder(args)
+ builder.setup_track_path()
+ builder.run(args.step, steps)
diff --git a/config/worker/code_benchmark.py b/config/worker/code_benchmark.py
new file mode 100755
index 0000000..f7837c5
--- /dev/null
+++ b/config/worker/code_benchmark.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import argparse
+import pathlib
+import sys
+
+from collections import OrderedDict
+
+sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))
+
+import worker.configure
+import worker.utils
+
+import worker.blender
+import worker.blender.benchmark
+import worker.blender.compile
+import worker.blender.update
+
+
+class BenchmarkBuilder(worker.blender.CodeBuilder):
+ def __init__(self, args: argparse.Namespace):
+ super().__init__(args)
+ self.setup_track_path()
+
+
+if __name__ == "__main__":
+ steps: worker.utils.BuilderSteps = OrderedDict()
+ steps["configure-machine"] = worker.configure.configure_machine
+ steps["update-code"] = worker.blender.update.update
+ steps["compile-code"] = worker.blender.compile.compile_code
+ steps["compile-gpu"] = worker.blender.compile.compile_gpu
+ steps["compile-install"] = worker.blender.compile.compile_install
+ steps["benchmark"] = worker.blender.benchmark.benchmark
+ steps["clean"] = worker.blender.CodeBuilder.clean
+
+ parser = worker.blender.create_argument_parser(steps=steps)
+
+ args = parser.parse_args()
+ builder = BenchmarkBuilder(args)
+ builder.run(args.step, steps)
diff --git a/config/worker/code_bpy_deploy.py
b/config/worker/code_bpy_deploy.py new file mode 100755 index 0000000..e406ab9 --- /dev/null +++ b/config/worker/code_bpy_deploy.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.configure +import worker.utils + +import worker.blender +import worker.blender.update + +import worker.deploy +import worker.deploy.pypi + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["configure-machine"] = worker.configure.configure_machine + steps["update-code"] = worker.blender.update.update + steps["pull"] = worker.deploy.pypi.pull + steps["deliver-pypi"] = worker.deploy.pypi.deliver + steps["clean"] = worker.deploy.CodeDeployBuilder.clean + + parser = worker.blender.create_argument_parser(steps=steps) + + args = parser.parse_args() + builder = worker.deploy.CodeDeployBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/code_deploy.py b/config/worker/code_deploy.py new file mode 100755 index 0000000..4faab34 --- /dev/null +++ b/config/worker/code_deploy.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.configure +import worker.utils + +import worker.blender +import worker.blender.update + +import worker.deploy +import worker.deploy.source +import worker.deploy.artifacts +import worker.deploy.monitor + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["configure-machine"] = worker.configure.configure_machine + steps["update-code"] = worker.blender.update.update + steps["pull-artifacts"] = worker.deploy.artifacts.pull + steps["repackage-artifacts"] = worker.deploy.artifacts.repackage + steps["package-source"] = worker.deploy.source.package + steps["deploy-artifacts"] = worker.deploy.artifacts.deploy + steps["monitor-artifacts"] = worker.deploy.monitor.monitor + steps["clean"] = worker.deploy.CodeDeployBuilder.clean + + parser = worker.blender.create_argument_parser(steps=steps) + + args = parser.parse_args() + builder = worker.deploy.CodeDeployBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/code_store.py b/config/worker/code_store.py new file mode 100755 index 0000000..0ad5736 --- /dev/null +++ b/config/worker/code_store.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.configure +import worker.utils + +import worker.blender +import worker.blender.update + +import worker.deploy +import worker.deploy.artifacts +import worker.deploy.snap +import worker.deploy.steam +import worker.deploy.windows + + +def package(builder: worker.deploy.CodeStoreBuilder) -> None: + if builder.store_id == "snap": + worker.deploy.snap.package(builder) + elif builder.store_id == "steam": + worker.deploy.steam.package(builder) + elif builder.store_id == "windows": + builder.setup_build_environment() + worker.deploy.windows.package(builder) + + +def deliver(builder: 
worker.deploy.CodeStoreBuilder) -> None: + if builder.store_id == "snap": + worker.deploy.snap.deliver(builder) + elif builder.store_id == "steam": + worker.deploy.steam.deliver(builder) + elif builder.store_id == "windows": + worker.deploy.windows.deliver(builder) + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["configure-machine"] = worker.configure.configure_machine + steps["update-code"] = worker.blender.update.update + steps["pull-artifacts"] = worker.deploy.artifacts.pull + steps["package"] = package + steps["deliver"] = deliver + steps["clean"] = worker.deploy.CodeDeployBuilder.clean + + parser = worker.blender.create_argument_parser(steps=steps) + parser.add_argument("--store-id", type=str, choices=["snap", "steam", "windows"], required=True) + + args = parser.parse_args() + builder = worker.deploy.CodeStoreBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/configure.py b/config/worker/configure.py new file mode 100644 index 0000000..4476e6c --- /dev/null +++ b/config/worker/configure.py @@ -0,0 +1,199 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import os +import pathlib +import platform +import psutil +import shutil + +from typing import List, Tuple + +import worker.utils + + +def get_os_release() -> str: + if platform.system() == "Darwin": + return "macOS " + platform.mac_ver()[0] + else: + return platform.version() + + +def get_cpu_info() -> str: + if platform.system() == "Darwin": + return worker.utils.check_output(["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]) + elif platform.system() == "Linux": + cpuinfo = pathlib.Path("/proc/cpuinfo").read_text() + for line in cpuinfo.splitlines(): + if line.find("model name") != -1: + return line.split(":")[1].strip() + + return platform.processor() + + +def disk_free_in_gb(builder: worker.utils.Builder) -> float: + _, _, disk_free = shutil.disk_usage(builder.track_path) + return disk_free / (1024.0**3) + + +def get_thread_count(thread_memory_in_GB: float) -> int: + num_threads = psutil.cpu_count() + memory_in_GB = psutil.virtual_memory().total / (1024**3) + + return min(int(memory_in_GB / thread_memory_in_GB), num_threads) + + +def clean(builder: worker.utils.Builder) -> None: + # Remove build folders to make space. 
+ delete_paths: List[pathlib.Path] = []
+ optional_delete_paths: List[pathlib.Path] = []
+
+ branches_config = builder.get_branches_config()
+ tracks = branches_config.track_major_minor_versions.keys()
+
+ # TODO: don't hardcode these folder and track names
+ for track in tracks:
+ track_path = builder.tracks_root_path / ("blender-manual-" + track)
+ optional_delete_paths += [track_path / "build"]
+
+ for track in tracks:
+ track_path = builder.tracks_root_path / ("blender-" + track)
+ delete_paths += [track_path / "build_download"]
+ delete_paths += [track_path / "build_linux"]
+ delete_paths += [track_path / "build_darwin"]
+ delete_paths += [track_path / "build_package"]
+ delete_paths += [track_path / "build_source"]
+ delete_paths += [track_path / "build_debug"]
+ delete_paths += [track_path / "build_arm64_debug"]
+ delete_paths += [track_path / "build_x86_64_debug"]
+ delete_paths += [track_path / "build_sanitizer"]
+ delete_paths += [track_path / "build_arm64_sanitizer"]
+ delete_paths += [track_path / "build_x86_64_sanitizer"]
+ delete_paths += [track_path / "install_release"]
+ delete_paths += [track_path / "install_asserts"]
+ delete_paths += [track_path / "install_sanitizer"]
+ delete_paths += [track_path / "install_debug"]
+ delete_paths += [track_path / "benchmark"]
+ optional_delete_paths += [track_path / "build_release"]
+ optional_delete_paths += [track_path / "build_arm64_release"]
+ optional_delete_paths += [track_path / "build_x86_64_release"]
+ optional_delete_paths += [track_path / "build_asserts"]
+ optional_delete_paths += [track_path / "build_arm64_asserts"]
+ optional_delete_paths += [track_path / "build_x86_64_asserts"]
+
+ for delete_path in delete_paths:
+ worker.utils.remove_dir(delete_path)
+
+ # Delete cached build folders only if we are low on disk space.
+ if builder.platform == "darwin":
+ # On macOS APFS this is not reliable, it makes space on demand.
+ # This should be ok still.
+ required_space_gb = 12.0
+ else:
+ required_space_gb = 25.0
+
+ free_space_gb = disk_free_in_gb(builder)
+ if free_space_gb < required_space_gb:
+ worker.utils.warning(
+ f"Trying to delete cached builds for disk space (free {free_space_gb:.2f} GB)"
+ )
+ sorted_paths: List[Tuple[float, pathlib.Path]] = []
+ for delete_path in optional_delete_paths:
+ try:
+ sorted_paths += [(os.path.getmtime(delete_path), delete_path)]
+ except OSError:
+ pass
+
+ for _, delete_path in sorted(sorted_paths):
+ worker.utils.remove_dir(delete_path)
+ if disk_free_in_gb(builder) >= required_space_gb:
+ break
+
+ # Might be left over from git command hanging
+ stack_dump_file_path = builder.code_path / "sh.exe.stackdump"
+ worker.utils.remove_file(stack_dump_file_path)
+
+
+def configure_machine(builder: worker.utils.Builder) -> None:
+ worker_config = builder.get_worker_config()
+
+ clean(builder)
+
+ # Print system information.
+ processor = get_cpu_info()
+
+ worker.utils.info("System information")
+ print(f"System: {platform.system()}")
+ print(f"Release: {get_os_release()}")
+ print(f"Version: {platform.version()}")
+ print(f"Processor: {processor}")
+ print(f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical")
+ print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB")
+ print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB")
+
+ disk_total, disk_used, disk_free = shutil.disk_usage(builder.track_path)
+ print(
+ f"Disk: total {disk_total / (1024**3):.2f} GB, "
+ f"used {disk_used / (1024**3):.2f} GB, "
+ f"free {disk_free / (1024**3):.2f} GB"
+ )
+
+ # Check dependencies and provision
+ worker.utils.info("Checking installable software cache")
+ available_software_artifacts = worker_config.software_cache_path.glob("*/*")
+ for artifact in available_software_artifacts:
+ print(artifact)
+
+ # Check packages
+ if builder.platform == "linux":
+ etc_rocky = pathlib.Path("/etc/rocky-release")
+
+ if etc_rocky.exists():
+ worker.utils.call(["yum", "updateinfo"])
+ worker.utils.call(["yum", "list", "updates"])
+ else:
+ worker.utils.call(["apt", "list", "--upgradable"])
+
+ elif builder.platform == "windows":
+ choco_version_str = worker.utils.check_output(["choco", "--version"])
+ choco_version = [int(x) for x in choco_version_str.split(".")]
+ if choco_version[0] >= 2:
+ # In the newer Chocolatey versions `choco list` behavior got changed
+ # to only list installed packages, and the --localonly flag has been
+ # removed.
+ worker.utils.call(["choco", "list"])
+ else:
+ worker.utils.call(["choco", "list", "--lo"])
+ worker.utils.call(["choco", "outdated"])
+
+ # Not an actual command, disabled for now.
+ # worker.utils.call(["scoop", "list"])
+ # worker.utils.call(["scoop", "status"])
+
+ elif builder.platform == "darwin":
+ worker.utils.call(["brew", "update"])
+ worker.utils.call(["brew", "outdated", "--cask"])
+ worker.utils.call(["xcrun", "--show-sdk-path"])
+
+ # XXX Windows builder debug code
+ if builder.platform == "windows":
+ # Ensure the idiff.exe process is stopped.
+ # It might be left hanging from a previously failed build, and it will
+ # prevent removal of the install directory for the new build (due to held
+ # open DLLs).
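+ # Sketch of the approach below: capture a minidump with procdump first so a
+ # hang can be diagnosed post-mortem, then force-kill anything still running.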
+ worker.utils.info("Stopping idiff.exe if running") + + dump_folder = pathlib.Path("C:\\tmp\\dump\\") + os.makedirs(dump_folder, exist_ok=True) + + worker.utils.call(["procdump", "idiff.exe", dump_folder], exit_on_error=False) + + for proc in psutil.process_iter(): + if proc.name() == "idiff.exe": + proc.kill() + + for proc in psutil.process_iter(): + if proc.name().lower() in ["blender", "blender.exe", "blender_test", "blender_test.exe"]: + worker.utils.warning("Killing stray Blender process") + proc.kill() diff --git a/config/worker/deploy/__init__.py b/config/worker/deploy/__init__.py new file mode 100644 index 0000000..f96757f --- /dev/null +++ b/config/worker/deploy/__init__.py @@ -0,0 +1,41 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import pathlib + +import worker.blender +import worker.utils + + +class CodeDeployBuilder(worker.blender.CodeBuilder): + def __init__(self, args: argparse.Namespace): + super().__init__(args) + self.platform_ids = ["linux", "darwin", "windows"] + self.setup_track_path() + + track_path: pathlib.Path = self.track_path + + self.download_dir = track_path / "build_download" + self.package_source_dir = track_path / "build_source" + self.store_steam_dir = track_path / "build_store_steam" + self.store_snap_dir = track_path / "build_store_snap" + self.store_windows_dir = track_path / "build_store_windows" + + def clean(self): + worker.utils.remove_dir(self.download_dir) + worker.utils.remove_dir(self.package_dir) + worker.utils.remove_dir(self.package_source_dir) + worker.utils.remove_dir(self.store_steam_dir) + worker.utils.remove_dir(self.store_snap_dir) + worker.utils.remove_dir(self.store_windows_dir) + # Created by make source_archive_complete + worker.utils.remove_dir(self.track_path / "build_linux") + worker.utils.remove_dir(self.track_path / "build_darwin") + + +class CodeStoreBuilder(CodeDeployBuilder): + def __init__(self, args: argparse.Namespace): + super().__init__(args) + self.store_id = args.store_id diff --git a/config/worker/deploy/artifacts.py b/config/worker/deploy/artifacts.py new file mode 100644 index 0000000..240e08b --- /dev/null +++ b/config/worker/deploy/artifacts.py @@ -0,0 +1,251 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import json +import os +import pathlib +import urllib.request + +from typing import Any, Dict + +import worker.blender +import worker.blender.version +import worker.deploy +import worker.utils + + +checksums = ["md5", "sha256"] + + +def pull(builder: worker.deploy.CodeDeployBuilder) -> None: + retry_count = 0 + retry_delay_in_seconds = 30 + timeout_in_seconds = 60 + + pipeline_category = "daily" + if builder.track_id == "vexp": + pipeline_category = "experimental" + + log_path = builder.track_path / "log" + worker.utils.remove_dir(log_path) + os.makedirs(log_path, exist_ok=True) + + worker.utils.info("Cleaning package directory") + worker.utils.remove_dir(builder.package_dir) + os.makedirs(builder.package_dir, exist_ok=True) + + # Fetch builds information. 
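+ # The endpoint is assumed to return a JSON list of build entries carrying at
+ # least the fields consumed below: version, file_name, file_extension,
+ # platform, architecture, risk_id and url.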
+ env_base_url = { + "LOCAL": "https://builder.blender.org", + "UATEST": "https://builder.uatest.blender.org", + "PROD": "https://builder.blender.org", + } + base_url = env_base_url[builder.service_env_id] + + search_url = f"{base_url}/download/{pipeline_category}?format=json&v=1" + + worker.utils.info(f"Fetching build JSON from [{search_url}]") + + builds_response = urllib.request.urlopen(search_url) + # TODO -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count + builds_json = json.load(builds_response) + + # Get builds matching our version. + worker.utils.info("Processing build JSON") + version_info = worker.blender.version.VersionInfo(builder) + + unique_builds: Dict[Any, Dict[Any, Any]] = {} + for build in builds_json: + if build["version"] != version_info.version: + continue + if build["file_extension"] in checksums: + continue + + # Correct incomplete file extension in JSON. + if build["file_name"].endswith(".tar.xz"): + build["file_extension"] = "tar.xz" + elif build["file_name"].endswith(".tar.gz"): + build["file_extension"] = "tar.gz" + elif build["file_name"].endswith(".tar.bz2"): + build["file_extension"] = "tar.bz2" + + key = (build["platform"], build["architecture"], build["file_extension"]) + if key in unique_builds: + # Prefer more stable builds, to avoid issue when multiple are present. + risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"] + risk = build["risk_id"] + risk = risk_id_order.index(risk) if risk in risk_id_order else len(risk_id_order) + other_risk = unique_builds[key]["risk_id"] + other_risk = ( + risk_id_order.index(other_risk) + if other_risk in risk_id_order + else len(risk_id_order) + ) + if other_risk <= risk: + continue + else: + print(" ".join(key)) + + unique_builds[key] = build + + builds = list(unique_builds.values()) + + if len(builds) == 0: + raise Exception(f"No builds found for version [{version_info.version}] in [{search_url}]") + + # Download builds. + worker.utils.remove_dir(builder.download_dir) + os.makedirs(builder.download_dir, exist_ok=True) + + for build in builds: + file_uri = build["url"] + file_name = build["file_name"] + + worker.utils.info(f"Pull [{file_name}]") + + download_file_path = builder.download_dir / file_name + + worker.utils.info(f"Download [{file_uri}]") + urllib.request.urlretrieve(file_uri, download_file_path) + # TODO: retry and resume + # -resume -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count + + # Moving to build_package folder + worker.utils.info(f"Move to [{builder.package_dir}]") + worker.utils.move(download_file_path, builder.package_dir / download_file_path.name) + + worker.utils.remove_dir(builder.download_dir) + + # Write manifest of downloaded packages. 
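+ # manifest.json is read back later by repackage() and by the store packaging
+ # steps, so it must stay in sync with the build entries downloaded above.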
+ package_manifest = builder.package_dir / "manifest.json"
+ package_manifest.write_text(json.dumps(builds, indent=2))
+
+
+def repackage(builder: worker.deploy.CodeDeployBuilder) -> None:
+ version_info = worker.blender.version.VersionInfo(builder)
+
+ deployable_path = builder.package_dir / "deployable"
+ worker.utils.remove_dir(deployable_path)
+ os.makedirs(deployable_path, exist_ok=True)
+ os.chdir(deployable_path)
+
+ package_manifest = builder.package_dir / "manifest.json"
+ builds = json.loads(package_manifest.read_text())
+
+ checksum_file_paths = []
+
+ # Rename the files and the internal folders for zip and tar.xz files
+ for build in builds:
+ file_name = build["file_name"]
+ file_path = builder.package_dir / file_name
+
+ worker.utils.info(f"Repackaging {file_name}")
+
+ if builder.service_env_id == "PROD" and build["risk_id"] != "stable":
+ raise Exception(
+ f"Can only repackage and deploy stable versions, found risk id '{build['risk_id']}'"
+ )
+
+ version = build["version"]
+ platform = build["platform"].replace("darwin", "macos")
+ architecture = build["architecture"].replace("86_", "").replace("amd", "x")
+ file_extension = build["file_extension"]
+
+ current_folder_name = file_path.name[: -len("." + file_extension)]
+ new_folder_name = f"blender-{version}-{platform}-{architecture}"
+ new_file_name = f"{new_folder_name}.{file_extension}"
+
+ source_file_path = file_path
+ dest_file_path = deployable_path / new_file_name
+
+ worker.utils.info(f"Renaming file [{source_file_path}] to [{dest_file_path}]")
+ worker.utils.copy_file(source_file_path, dest_file_path)
+
+ if file_extension == "zip":
+ worker.utils.info(f"Renaming internal folder to [{new_folder_name}]")
+ worker.utils.call(["7z", "rn", dest_file_path, current_folder_name, new_folder_name])
+ elif file_extension == "tar.xz":
+ worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]")
+ worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."])
+
+ worker.utils.remove_file(dest_file_path)
+ worker.utils.move(
+ deployable_path / current_folder_name, deployable_path / new_folder_name
+ )
+
+ worker.utils.info(f"Compressing [{new_folder_name}] to [{dest_file_path}]")
+ cmd = [
+ "tar",
+ "-cv",
+ "--owner=0",
+ "--group=0",
+ "--use-compress-program",
+ "xz -6",
+ "-f",
+ dest_file_path,
+ new_folder_name,
+ ]
+ worker.utils.call(cmd)
+ worker.utils.remove_dir(deployable_path / new_folder_name)
+
+ checksum_file_paths.append(dest_file_path)
+
+ # Create checksums
+ worker.utils.info("Creating checksums")
+ os.chdir(deployable_path)
+
+ for checksum in checksums:
+ checksum_text = ""
+ for filepath in checksum_file_paths:
+ checksum_line = worker.utils.check_output([f"{checksum}sum", filepath.name]).strip()
+ checksum_text += checksum_line + "\n"
+
+ print(checksum_text)
+ checksum_filepath = deployable_path / f"blender-{version_info.version}.{checksum}"
+ checksum_filepath.write_text(checksum_text)
+
+
+def deploy(builder: worker.deploy.CodeDeployBuilder) -> None:
+ # Not testable on UATEST currently.
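+ # In other words, the ssh/rsync calls below only run for real on LOCAL and
+ # PROD; on any other service environment they are dry runs.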
+ dry_run = builder.service_env_id not in ("LOCAL", "PROD") + worker_config = builder.get_worker_config() + connect_id = f"{worker_config.download_user}@{worker_config.download_machine}" + + # Copy source + remote_dest_path = pathlib.Path(worker_config.download_source_folder) + change_modes = ["F0444"] + + if builder.service_env_id != "PROD": + # Already assumed to exist on production + worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) + + for source_path in builder.package_source_dir.iterdir(): + dest_path = f"{connect_id}:{remote_dest_path}/" + worker.utils.info(f"Deploying source package [{source_path}]") + worker.utils.rsync( + source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run + ) + + worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) + + # Copy binaries + version_info = worker.blender.version.VersionInfo(builder) + major_minor_version = version_info.short_version + remote_dest_path = ( + pathlib.Path(worker_config.download_release_folder) / f"Blender{major_minor_version}" + ) + deployable_path = builder.package_dir / "deployable" + change_modes = ["F0444"] + + worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) + worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) + + for source_path in deployable_path.iterdir(): + dest_path = f"{connect_id}:{remote_dest_path}/" + worker.utils.info(f"Deploying binary package [{source_path}]") + worker.utils.rsync( + source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run + ) + + worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) diff --git a/config/worker/deploy/monitor.py b/config/worker/deploy/monitor.py new file mode 100644 index 0000000..206cb63 --- /dev/null +++ b/config/worker/deploy/monitor.py @@ -0,0 +1,110 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import re +import time +import urllib.request + +import worker.blender.version +import worker.deploy.artifacts +import worker.deploy +import worker.utils + + +def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: + wait_time_in_seconds = 120 + + start_time = time.time() + max_time_hours = 4.0 + + version_info = worker.blender.version.VersionInfo(builder) + + required_base_url = "https://mirror.clarkson.edu/blender/release" + monitored_base_urls = [ + "https://download.blender.org/release", + "https://ftp.nluug.nl/pub/graphics/blender/release", + "https://ftp.halifax.rwth-aachen.de/blender/release", + "https://mirrors.dotsrc.org/blender/blender-release", + "https://mirrors.ocf.berkeley.edu/blender/release", + "https://mirrors.iu13.net/blender/release", + "https://mirrors.aliyun.com/blender/release", + "https://mirrors.sahilister.in/blender/release", + "https://mirror.freedif.org/blender/release", + required_base_url, + ] + + stop_on_required_site_found = False + + branches_config = builder.get_branches_config() + expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] + + expected_file_count = len(worker.deploy.artifacts.checksums) + for expected_platform in expected_platforms: + if expected_platform.startswith("windows"): + expected_file_count += 3 # msi, msix, zip + else: + expected_file_count += 1 + + folder_name = f"Blender{version_info.short_version}" + file_pattern = rf"[Bb]lender-{version_info.version}[\.\-\_a-zA-Z0-9]*" + + while True: + 
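+ # Each pass polls every mirror once; the loop ends when all mirrors carry the
+ # complete file set or when the max_time_hours budget runs out.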
found_site_count = 0 + print("=" * 80) + + # Assume no files are missing + sites_missing_files_count = 0 + + for base_url in monitored_base_urls: + search_url = f"{base_url}/{folder_name}" + print(f"Checking [{search_url}] for version [{version_info.version}]") + + # Header to avoid getting permission denied. + request = urllib.request.Request(search_url, headers={"User-Agent": "Mozilla"}) + + try: + response = urllib.request.urlopen(request, timeout=5.0) + text = response.read().decode("utf-8", "ignore") + except Exception as e: + print(e) + text = "" + + matches = set(re.findall(file_pattern, text)) + found_file_count = len(matches) + for match in matches: + print(f"File [{match}]") + + if len(matches) == expected_file_count: + found_site_count += 1 + elif len(matches) > 0: + sites_missing_files_count += 1 + print("-" * 80) + + can_stop_monitoring = ( + (len(matches) == expected_file_count) + and (base_url == required_base_url) + and (sites_missing_files_count == 0) + ) + + if stop_on_required_site_found and can_stop_monitoring: + print(f"Required site found [{required_base_url}], stopping") + return + + print("") + print("=" * 80) + print(f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files") + print("=" * 80) + + if found_site_count == len(monitored_base_urls): + break + + remaining_time_hours = max_time_hours - (time.time() - start_time) / 3600.0 + if remaining_time_hours < 0.0: + print("Waited for maximum amount of time, stopping") + break + + print( + f"Waiting {wait_time_in_seconds}s, total wait time remaining {remaining_time_hours:.2f}h" + ) + time.sleep(wait_time_in_seconds) diff --git a/config/worker/deploy/pypi.py b/config/worker/deploy/pypi.py new file mode 100644 index 0000000..51b8ae1 --- /dev/null +++ b/config/worker/deploy/pypi.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import json +import os +import urllib.request +import zipfile + + +import worker.blender +import worker.blender.version +import worker.deploy +import worker.utils + + +def pull(builder: worker.deploy.CodeDeployBuilder) -> None: + version_info = worker.blender.version.VersionInfo(builder) + + worker.utils.info("Cleaning package and download directory") + worker.utils.remove_dir(builder.package_dir) + worker.utils.remove_dir(builder.download_dir) + os.makedirs(builder.package_dir, exist_ok=True) + os.makedirs(builder.download_dir, exist_ok=True) + + # Fetch builds information. + env_base_url = { + "LOCAL": "https://builder.blender.org", + "UATEST": "https://builder.uatest.blender.org", + "PROD": "https://builder.blender.org", + } + base_url = env_base_url[builder.service_env_id] + + search_url = f"{base_url}/download/bpy/?format=json&v=1" + + worker.utils.info(f"Fetching build JSON from [{search_url}]") + + builds_response = urllib.request.urlopen(search_url) + builds_json = json.load(builds_response) + + # Get builds matching our version. 
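+ # Only stable zip builds whose version matches the current source tree are
+ # accepted; anything else is either skipped or aborts the deploy.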
+ worker.utils.info("Processing build JSON")
+
+ matching_builds = []
+ for build in builds_json:
+ if build["version"] != version_info.version:
+ continue
+ if not build["file_name"].endswith(".zip"):
+ continue
+ worker.utils.info(f"Found {build['file_name']}")
+ if build["risk_id"] != "stable":
+ raise Exception("Can only deploy stable releases")
+ matching_builds.append(build)
+
+ # Check expected platforms
+ branches_config = builder.get_branches_config()
+ expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
+ if len(expected_platforms) != len(matching_builds):
+ platform_names = "\n".join(expected_platforms)
+ raise Exception("Unexpected number of builds, expected:\n" + platform_names)
+
+ # Download builds.
+ for build in matching_builds:
+ file_uri = build["url"]
+ file_name = build["file_name"]
+
+ worker.utils.info(f"Download [{file_uri}]")
+ download_file_path = builder.download_dir / file_name
+ urllib.request.urlretrieve(file_uri, download_file_path)
+
+ # Unzip.
+ with zipfile.ZipFile(download_file_path, "r") as zipf:
+ zipf.extractall(path=builder.package_dir)
+
+ worker.utils.remove_dir(builder.download_dir)
+
+
+def deliver(builder: worker.deploy.CodeDeployBuilder) -> None:
+ dry_run = builder.service_env_id != "PROD"
+ wheels = list(builder.package_dir.glob("*.whl"))
+
+ # Check expected platforms
+ branches_config = builder.get_branches_config()
+ expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
+ wheel_names = "\n".join([wheel.name for wheel in wheels])
+ wheel_paths = [str(wheel) for wheel in wheels]
+ print(wheel_names)
+ if len(expected_platforms) != len(wheels):
+ raise Exception("Unexpected number of wheels:\n" + wheel_names)
+
+ # Check wheels
+ cmd = ["twine", "check"] + wheel_paths
+ worker.utils.call(cmd)
+
+ # Upload
+ worker_config = builder.get_worker_config()
+ env = os.environ.copy()
+ env["TWINE_USERNAME"] = "__token__"
+ env["TWINE_PASSWORD"] = worker_config.pypi_token(builder.service_env_id)
+ env["TWINE_REPOSITORY_URL"] = "https://upload.pypi.org/legacy/"
+
+ cmd = ["twine", "upload", "--verbose", "--non-interactive"] + wheel_paths
+ worker.utils.call(cmd, env=env, dry_run=dry_run)
diff --git a/config/worker/deploy/snap.py b/config/worker/deploy/snap.py
new file mode 100644
index 0000000..cb06cb8
--- /dev/null
+++ b/config/worker/deploy/snap.py
@@ -0,0 +1,161 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import json
+import os
+
+import worker.blender.version
+import worker.deploy
+import worker.utils
+
+
+def package(builder: worker.deploy.CodeStoreBuilder) -> None:
+ dry_run = False
+ if builder.service_env_id == "LOCAL" and not (
+ builder.platform == "linux" and worker.utils.is_tool("snapcraft")
+ ):
+ worker.utils.warning("Performing dry run on LOCAL service environment")
+ dry_run = True
+ elif not builder.platform == "linux":
+ raise Exception("Can only run snapcraft on Linux, aborting")
+
+ version_info = worker.blender.version.VersionInfo(builder)
+
+ needs_stable_grade = version_info.risk_id in ["candidate", "stable"]
+ grade = "stable" if needs_stable_grade else "devel"
+
+ # Clean directory
+ for old_package_file in builder.store_snap_dir.glob("*.tar.xz"):
+ worker.utils.remove_file(old_package_file)
+ os.makedirs(builder.store_snap_dir, exist_ok=True)
+
+ # Get input package file path
+ package_manifest = builder.package_dir / "manifest.json"
+ builds = json.loads(package_manifest.read_text())
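+ # The snap is built from the Linux tar.xz artifact pulled earlier; locate it
+ # through the manifest written by the pull step.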
+ linux_package_file_path = None + + for build in builds: + if build["platform"] == "linux" and build["file_extension"] == "tar.xz": + linux_package_file_path = builder.package_dir / build["file_name"] + break + if not linux_package_file_path: + raise Exception(f"Linux package not found in [{builder.package_dir}] manifest") + + source_file_path = linux_package_file_path + dest_file_path = builder.store_snap_dir / linux_package_file_path.name + worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") + worker.utils.copy_file(source_file_path, dest_file_path) + + freedesktop_path = builder.code_path / "release" / "freedesktop" + snap_source_root_path = freedesktop_path / "snap" + + blender_icon_file_name = "blender.svg" + snapcraft_template_file_path = snap_source_root_path / "blender-snapcraft-template.yaml" + + worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]") + snapcraft_text = snapcraft_template_file_path.read_text() + snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version) + snapcraft_text = snapcraft_text.replace("@GRADE@", grade) + snapcraft_text = snapcraft_text.replace("@ICON_PATH@", f"./{blender_icon_file_name}") + snapcraft_text = snapcraft_text.replace("@PACKAGE_PATH@", f"./{linux_package_file_path.name}") + + snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml" + worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]") + snapcraft_file_path.write_text(snapcraft_text) + print(snapcraft_text) + + snap_package_file_name = f"blender_{version_info.version}_amd64.snap" + snap_package_file_path = builder.store_snap_dir / snap_package_file_name + if snap_package_file_path.exists(): + worker.utils.info(f"Clearing snap file [{snap_package_file_path}]") + worker.utils.remove_file(snap_package_file_path) + + os.chdir(builder.store_snap_dir) + + # Copy all required files into working folder + source_file_path = freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name + dest_file_path = builder.store_snap_dir / "blender.svg" + worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") + worker.utils.copy_file(source_file_path, dest_file_path) + + source_file_path = snap_source_root_path / "blender-wrapper" + dest_file_path = builder.store_snap_dir / "blender-wrapper" + worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") + worker.utils.copy_file(source_file_path, dest_file_path) + + worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run) + worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run) + worker.utils.call( + ["review-tools.snap-review", snap_package_file_path, "--allow-classic"], dry_run=dry_run + ) + + if dry_run: + snap_package_file_path.write_text("Dry run dummy package file") + + worker.utils.info("To test the snap package run this command") + print("sudo snap remove blender") + print(f"sudo snap install --dangerous --classic {snap_package_file_path}") + + +def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: + dry_run = False + if builder.service_env_id == "LOCAL": + worker.utils.warning("Performing dry run on LOCAL service environment") + dry_run = True + elif not builder.platform == "linux": + raise Exception("Can only run snapcraft on Linux, aborting") + + version_info = worker.blender.version.VersionInfo(builder) + branches_config = builder.get_branches_config() + is_lts = builder.track_id in branches_config.all_lts_tracks + is_latest = ( + 
branches_config.track_major_minor_versions[builder.track_id] == version_info.short_version
+ )
+
+ # Never push to stable
+ snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace("alpha", "edge")
+ if snap_risk_id == "stable":
+ raise Exception("Delivery to [stable] channel not allowed")
+
+ snap_track_id = version_info.short_version
+
+ if is_lts:
+ snap_track_id += "lts"
+ needs_release = True
+ elif is_latest:
+ # latest/edge always vdev
+ snap_track_id = "latest"
+ needs_release = True
+ else:
+ # Push current release under development to beta or candidate
+ needs_release = True
+
+ # worker.utils.call(["snapcraft", "list-tracks", "blender"], dry_run=dry_run)
+ snap_package_file_name = f"blender_{version_info.version}_amd64.snap"
+ snap_package_file_path = builder.store_snap_dir / snap_package_file_name
+ if not snap_package_file_path.exists():
+ raise Exception(f"Snap file [{snap_package_file_path}] missing")
+
+ worker_config = builder.get_worker_config()
+ env = os.environ.copy()
+ env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(builder.service_env_id)
+
+ # If this fails, then the permissions were not set correctly with ACLs
+ worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env)
+
+ if needs_release:
+ # Upload and release.
+ snap_channel = f"{snap_track_id}/{snap_risk_id}"
+ cmd = ["snapcraft", "upload", "--release", snap_channel, snap_package_file_path]
+ else:
+ # Upload only.
+ snap_channel = ""
+ cmd = ["snapcraft", "upload", snap_package_file_path]
+
+ # Some API call is making this fail; it seems to be status based, as we can upload and set the channel
+ worker.utils.call(cmd, retry_count=5, retry_wait_time=120, dry_run=dry_run, env=env)
+
+ if needs_release:
+ worker.utils.info("To test the snap package run this command")
+ print(f"sudo snap refresh blender --classic --channel {snap_channel}")
diff --git a/config/worker/deploy/source.py b/config/worker/deploy/source.py
new file mode 100644
index 0000000..cd58069
--- /dev/null
+++ b/config/worker/deploy/source.py
@@ -0,0 +1,38 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import os
+
+import worker.blender.version
+import worker.deploy
+import worker.utils
+
+
+def _package(builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False) -> None:
+ os.chdir(builder.code_path)
+ if needs_complete:
+ worker.utils.call(["make", "source_archive_complete"])
+ else:
+ worker.utils.call(["make", "source_archive"])
+
+ # The make source archive scripts write to a different location since 2.83.
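+ # Hence both the code path and the track path are globbed below.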
+    for source_file in builder.code_path.glob("blender-*.tar.xz*"):
+        worker.utils.move(source_file, builder.package_source_dir / source_file.name)
+    for source_file in builder.track_path.glob("blender-*.tar.xz*"):
+        worker.utils.move(source_file, builder.package_source_dir / source_file.name)
+
+
+def package(builder: worker.deploy.CodeDeployBuilder) -> None:
+    print(f"Cleaning path [{builder.package_source_dir}]")
+    worker.utils.remove_dir(builder.package_source_dir)
+    os.makedirs(builder.package_source_dir, exist_ok=True)
+
+    _package(builder, needs_complete=False)
+
+    version_info = worker.blender.version.VersionInfo(builder)
+    if version_info.patch != 0:
+        worker.utils.info("Skipping complete source package for patch release")
+        return
+
+    _package(builder, needs_complete=True)
diff --git a/config/worker/deploy/steam.py b/config/worker/deploy/steam.py
new file mode 100644
index 0000000..fa96bfe
--- /dev/null
+++ b/config/worker/deploy/steam.py
@@ -0,0 +1,260 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import json
+import os
+import pathlib
+import time
+
+import worker.blender.version
+import worker.deploy
+import worker.utils
+
+
+def extract_file(
+    builder: worker.deploy.CodeStoreBuilder, source_file_path: pathlib.Path, platform: str
+) -> None:
+    worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam")
+    if not source_file_path.exists():
+        raise Exception("File not found, aborting")
+
+    dest_extract_path = builder.store_steam_dir / platform
+    dest_content_path = dest_extract_path / "content"
+    worker.utils.remove_dir(dest_extract_path)
+    worker.utils.remove_dir(dest_content_path)
+    os.makedirs(dest_extract_path, exist_ok=True)
+
+    if platform == "linux":
+        worker.utils.info(f"Extract [{source_file_path}] to [{dest_extract_path}]")
+        cmd = ["tar", "-xf", source_file_path, "--directory", dest_extract_path]
+        worker.utils.call(cmd)
+
+        # Move any folder there as ./content
+        for source_content_path in dest_extract_path.iterdir():
+            if source_content_path.is_dir():
+                worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]")
+                worker.utils.move(source_content_path, dest_content_path)
+                break
+
+    elif platform == "darwin":
+        source_content_path = dest_extract_path / "Blender"
+        if source_content_path.exists():
+            worker.utils.info(f"Removing [{source_content_path}]")
+            worker.utils.remove_dir(source_content_path)
+
+        image_file_path = source_file_path.with_suffix(".img")
+
+        cmd = ["dmg2img", "-v", "-i", source_file_path, "-o", image_file_path]
+        worker.utils.call(cmd)
+
+        cmd = ["7z", "x", f"-o{dest_extract_path}", image_file_path]
+        worker.utils.call(cmd)
+
+        os.makedirs(dest_content_path, exist_ok=True)
+
+        worker.utils.remove_file(image_file_path)
+
+        worker.utils.info(f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]")
+        worker.utils.move(source_content_path / "Blender.app", dest_content_path / "Blender.app")
+        worker.utils.remove_dir(source_content_path)
+    elif platform == "windows":
+        worker.utils.info(f"Extracting zip file [{source_file_path}]")
+        cmd = ["7z", "x", f"-o{dest_extract_path}", source_file_path]
+        worker.utils.call(cmd)
+
+        # Move any folder there as ./content
+        for source_content_path in dest_extract_path.iterdir():
+            if source_content_path.is_dir():
+                worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]")
+                worker.utils.move(source_content_path, dest_content_path)
+                break
+    else:
+        raise Exception(f"Don't know how to extract for platform [{platform}]")
+
+
+def extract(builder: worker.deploy.CodeStoreBuilder) -> None:
+    package_manifest = builder.package_dir / "manifest.json"
+    builds = json.loads(package_manifest.read_text())
+
+    for build in builds:
+        if build["file_extension"] not in ["zip", "tar.xz", "dmg"]:
+            continue
+        if build["architecture"] == "arm64":
+            continue
+
+        file_path = builder.package_dir / build["file_name"]
+        platform = build["platform"]
+        extract_file(builder, file_path, platform)
+
+
+def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None:
+    dry_run = False
+    if builder.service_env_id == "LOCAL":
+        worker.utils.warning("Performing dry run on LOCAL service environment")
+        dry_run = True
+
+    version_info = worker.blender.version.VersionInfo(builder)
+    branches_config = builder.get_branches_config()
+    is_lts = builder.track_id in branches_config.all_lts_tracks
+    is_latest = branches_config.track_major_minor_versions["vdev"] == version_info.short_version
+
+    track_path = builder.track_path
+    log_path = builder.track_path / "log"
+    worker.utils.remove_dir(log_path)
+    os.makedirs(log_path, exist_ok=True)
+
+    worker_config = builder.get_worker_config()
+    steam_credentials = worker_config.steam_credentials(builder.service_env_id)
+    steam_user_id, steam_user_password = steam_credentials
+    if not steam_user_id or not steam_user_password:
+        if not dry_run:
+            raise Exception("Steam user id or password not available, aborting")
+
+    env = os.environ.copy()
+    env["PATH"] = env["PATH"] + os.pathsep + "/usr/games"
+
+    cmd: worker.utils.CmdSequence = [
+        "steamcmd",
+        "+login",
+        worker.utils.HiddenArgument(steam_user_id),
+        worker.utils.HiddenArgument(steam_user_password),
+        "+quit",
+    ]
+    worker.utils.call(cmd, dry_run=dry_run, env=env)
+
+    worker.utils.info("Waiting 5 seconds for next steam command")
+    time.sleep(5.0)
+
+    steam_app_id = worker_config.steam_app_id
+    steam_platform_depot_ids = worker_config.steam_platform_depot_ids
+
+    for platform_id in ["linux", "darwin", "windows"]:
+        worker.utils.info(f"Platform {platform_id}")
+
+        platform_depot_id = steam_platform_depot_ids[platform_id]
+
+        track_build_root_path = builder.store_steam_dir / platform_id
+        if not track_build_root_path.exists():
+            raise Exception(f"Folder {track_build_root_path} does not exist")
+
+        platform_build_file_path = track_build_root_path / "depot_build.vdf"
+
+        source_root_path = track_build_root_path / "content"
+        if not source_root_path.exists():
+            raise Exception(f"Folder {source_root_path} does not exist")
+
+        dest_root_path = track_build_root_path / "output"
+
+        # Steam branch names cannot contain upper case characters or spaces.
+        # Branches are named "daily" and "devtest" on Steam, so rename those.
+        steam_branch_id = builder.service_env_id.lower()
+        steam_branch_id = steam_branch_id.replace("prod", "daily")
+        steam_branch_id = steam_branch_id.replace("uatest", "devtest")
+
+        if is_lts:
+            # daily-X.X and devtest-X.X branches for LTS.
+            steam_branch_id = f"{steam_branch_id}-{version_info.short_version}"
+        elif is_latest:
+            # daily and devtest branches for main without suffix.
+            pass
+        else:
+            # Not setting this live.
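+            # An empty "setlive" value in the generated VDF below means the build
+            # is uploaded but not set live on any branch.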
+            steam_branch_id = ""
+
+        preview = "1" if is_preview else "0"
+
+        app_build_script = f"""
+"appbuild"
+{{
+    "appid" "{steam_app_id}"
+    "desc" "Blender {version_info.version}" // description for this build
+    "buildoutput" "{dest_root_path}" // build output folder for .log, .csm & .csd files, relative to location of this file
+    "contentroot" "{source_root_path}" // root content folder, relative to location of this file
+    "setlive" "{steam_branch_id}" // branch to set live after successful build, none if empty
+    "preview" "{preview}" // 1 to enable preview builds, 0 to commit build to steampipe
+    "local" "" // set to file path of local content server
+
+    "depots"
+    {{
+        "{platform_depot_id}" "{platform_build_file_path}"
+    }}
+}}
+"""
+
+        platform_build_script = f"""
+"DepotBuildConfig"
+{{
+    // Set your assigned depot ID here
+    "DepotID" "{platform_depot_id}"
+
+    // Set a root for all content.
+    // All relative paths specified below (LocalPath in FileMapping entries, and FileExclusion paths)
+    // will be resolved relative to this root.
+    // If you don't define ContentRoot, then it will be assumed to be
+    // the location of this script file, which probably isn't what you want
+    "ContentRoot" "{source_root_path}"
+
+    // include all files recursively
+    "FileMapping"
+    {{
+        // This can be a full path, or a path relative to ContentRoot
+        "LocalPath" "*"
+
+        // This is a path relative to the install folder of your game
+        "DepotPath" "."
+
+        // If LocalPath contains wildcards, setting this means that all
+        // matching files within subdirectories of LocalPath will also
+        // be included.
+        "recursive" "1"
+    }}
+
+    // but exclude all symbol files
+    // This can be a full path, or a path relative to ContentRoot
+    //"FileExclusion" "*.pdb"
+}}
+"""
+
+        (track_build_root_path / "app_build.vdf").write_text(app_build_script)
+        platform_build_file_path.write_text(platform_build_script)
+
+        worker.utils.info(
+            f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] for steam branch [{steam_branch_id}], building"
+        )
+
+        cmd = [
+            "steamcmd",
+            "+login",
+            worker.utils.HiddenArgument(steam_user_id),
+            worker.utils.HiddenArgument(steam_user_password),
+            "+run_app_build",
+            track_build_root_path / "app_build.vdf",
+            "+quit",
+        ]
+        # Use the boolean flag here; `preview` is the string "0" or "1", which is always truthy.
+        retry_count = 0 if is_preview else 3
+
+        worker.utils.call(
+            cmd, retry_count=retry_count, retry_wait_time=120, dry_run=dry_run, env=env
+        )
+
+        worker.utils.info("Waiting 5 seconds for next steam command")
+        time.sleep(5.0)
+
+        worker.utils.info(
+            f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] is done, success"
+        )
+
+
+def package(builder: worker.deploy.CodeStoreBuilder) -> None:
+    worker.utils.remove_dir(builder.store_steam_dir)
+    os.makedirs(builder.store_steam_dir, exist_ok=True)
+
+    # Extract and prepare content
+    extract(builder)
+    build(builder, is_preview=True)
+
+
+def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
+    # This will push to the store
+    build(builder, is_preview=False)
diff --git a/config/worker/deploy/windows.py b/config/worker/deploy/windows.py
new file mode 100644
index 0000000..8bece72
--- /dev/null
+++ b/config/worker/deploy/windows.py
@@ -0,0 +1,116 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# SPDX-FileCopyrightText: 2011-2024 Blender Authors
+#
+
+import json
+import os
+
+import worker.blender.pack
+import worker.blender.sign
+import worker.blender.version
+import worker.blender.msix_package
+
+import worker.deploy
+import worker.utils
+
+
+def _package_architecture(
+    builder: worker.deploy.CodeStoreBuilder, architecture: str, dry_run: bool
+) -> None:
+    version_info = worker.blender.version.VersionInfo(builder)
+
+    # Revision with MS Store must be set to 0
+    revision_id = 0
+
+    branches_config = builder.get_branches_config()
+    is_lts = builder.track_id in branches_config.windows_store_lts_tracks
+    base_build_number = 0
+
+    build_number = version_info.patch + base_build_number
+    worker.utils.info(f"Build number {build_number}")
+
+    store_version_id = f"{version_info.short_version}.{build_number}.{revision_id}"
+    worker.utils.info(f"Store version ID {store_version_id}")
+
+    worker.utils.info(f"Cleaning path [{builder.store_windows_dir}]")
+    worker.utils.remove_dir(builder.store_windows_dir)
+    os.makedirs(builder.store_windows_dir, exist_ok=True)
+
+    os.chdir(builder.store_windows_dir)
+
+    # Find input zip package.
+    package_manifest = builder.package_dir / "manifest.json"
+    builds = json.loads(package_manifest.read_text())
+    input_file_path = None
+
+    for build in builds:
+        if (
+            build["platform"] == "windows"
+            and build["file_extension"] == "zip"
+            and build["architecture"] == architecture
+        ):
+            input_file_path = builder.package_dir / build["file_name"]
+            break
+    if not input_file_path:
+        raise Exception(f"Windows package not found in [{builder.package_dir}] manifest")
+
+    # Copy all required files into working folder
+    source_path = builder.code_path / "release" / "windows" / "msix"
+    dest_path = builder.store_windows_dir
+    worker.utils.info(f"Copying [{source_path}] -> [{dest_path}] for windows store packaging")
+
+    for source_file in source_path.iterdir():
+        if source_file.name == "README.md":
+            continue
+        if source_file.is_dir():
+            worker.utils.copy_dir(source_file, dest_path / source_file.name)
+        else:
+            worker.utils.copy_file(source_file, dest_path / source_file.name)
+
+    worker_config = builder.get_worker_config()
+
+    cert_subject = worker_config.windows_store_certificate(builder.service_env_id)
+    certificate_id = f"CN={cert_subject}"
+
+    msix_filepath = worker.blender.msix_package.pack(
+        store_version_id, input_file_path, certificate_id, lts=is_lts, dry_run=dry_run
+    )
+
+    if worker_config.windows_store_self_sign:
+        worker.blender.sign.sign_windows_files(
+            builder.service_env_id, [msix_filepath], certificate_id=certificate_id
+        )
+
+    if dry_run:
+        msix_filepath.write_text("Dry run dummy package file")
+
+    # Clear out all msix files first
+    for old_msix_filepath in builder.package_dir.glob("*.msix"):
+        worker.utils.remove_file(old_msix_filepath)
+
+    dest_path = builder.package_dir / msix_filepath.name
+    worker.utils.info(f"Copying [{msix_filepath}] -> [{dest_path}] for distribution")
+    worker.utils.copy_file(msix_filepath, dest_path)
+    worker.blender.pack.generate_file_hash(dest_path)
+
+
+def package(builder: worker.deploy.CodeStoreBuilder) -> None:
+    dry_run = False
+    if builder.platform != "windows":
+        if builder.service_env_id == "LOCAL":
+            worker.utils.warning("Performing dry run on LOCAL service environment")
+            dry_run = True
+        else:
+            raise Exception("Can only run this on Windows, aborting")
+
+    branches_config = builder.get_branches_config()
+    expected_platforms = branches_config.code_official_platform_architectures[builder.track_id]
+
+    for expected_platform in expected_platforms:
+        if expected_platform.startswith("windows"):
+            architecture = expected_platform.split("-")[1]
+            _package_architecture(builder, architecture, dry_run)
+
+
+def deliver(builder: worker.deploy.CodeStoreBuilder) -> None:
+    worker.utils.info("Windows store
delivery not implemented") diff --git a/config/worker/doc_api.py b/config/worker/doc_api.py new file mode 100755 index 0000000..06bf744 --- /dev/null +++ b/config/worker/doc_api.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import os +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.configure +import worker.utils + +import worker.blender +import worker.blender.compile +import worker.blender.update +import worker.blender.version + + +class DocApiBuilder(worker.blender.CodeBuilder): + def __init__(self, args: argparse.Namespace): + super().__init__(args) + self.needs_package_delivery = args.needs_package_delivery + self.setup_track_path() + + +def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None: + import urllib.request + import json + + api_base_url = "https://docs.blender.org/api" + api_dump_index_url = f"{api_base_url}/api_dump_index.json" + + request = urllib.request.Request(api_dump_index_url, headers={"User-Agent": "Mozilla"}) + response = urllib.request.urlopen(request, timeout=5.0) + + api_dump_index_text = response.read().decode("utf-8", "ignore") + api_dump_index_path = local_delivery_path / "api_dump_index.json" + os.makedirs(api_dump_index_path.parent, exist_ok=True) + api_dump_index_path.write_text(api_dump_index_text) + + api_dump_index = json.loads(api_dump_index_text) + for version in api_dump_index.keys(): + api_dump_url = f"{api_base_url}/{version}/api_dump.json" + worker.utils.info(f"Download {api_dump_url}") + + request = urllib.request.Request(api_dump_url, headers={"User-Agent": "Mozilla"}) + response = urllib.request.urlopen(request, timeout=5.0) + + api_dump_text = response.read().decode("utf-8", "ignore") + api_dump_path = local_delivery_path / version / "api_dump.json" + os.makedirs(api_dump_path.parent, exist_ok=True) + api_dump_path.write_text(api_dump_text) + + +def compile_doc(builder: DocApiBuilder) -> None: + # Install requirements + os.chdir(builder.track_path) + doc_api_script_path = builder.code_path / "doc" / "python_api" + worker.utils.call_pipenv( + ["install", "--requirements", doc_api_script_path / "requirements.txt"] + ) + + # Clean build directory + worker.utils.remove_dir(builder.build_doc_path) + os.makedirs(builder.build_doc_path, exist_ok=True) + + os.chdir(doc_api_script_path) + + # Get API dumps data from server. + api_dump_build_path = builder.build_doc_path / "api_dump" + os.makedirs(api_dump_build_path, exist_ok=True) + + api_dump_include_paths = ["api_dump_index.json", "*/", "api_dump.json"] + api_dump_build_path_index = api_dump_build_path / "api_dump_index.json" + + worker_config = builder.get_worker_config() + connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" + remote_path = ( + pathlib.Path(worker_config.docs_folder) + / "docs.blender.org" + / "htdocs" + / builder.service_env_id + / "api" + ) + + # Get data from docs.blender.org for local testing. 
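+    # LOCAL environments cannot assume the dumps are already mirrored, so the
+    # published data is fetched over HTTPS first (see download_api_dump_test_data above).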
+ if builder.service_env_id == "LOCAL": + worker.utils.info("Downloading API dump data from docs.blender.org for testing") + download_api_dump_test_data(remote_path) + + source_path = f"{connect_id}:{remote_path}/" + dest_path = api_dump_build_path + + worker.utils.rsync( + source_path, dest_path, include_paths=api_dump_include_paths, exclude_paths=["*"] + ) + + version = worker.blender.version.VersionInfo(builder).short_version + api_dump_build_path_current_version = api_dump_build_path / version + os.makedirs(api_dump_build_path_current_version, exist_ok=True) + + # Generate API docs + cmd = [ + builder.blender_command_path(), + "--background", + "--factory-startup", + "-noaudio", + "--python", + doc_api_script_path / "sphinx_doc_gen.py", + "--", + "--output", + builder.build_doc_path, + "--api-changelog-generate", + "--api-dump-index-path", + api_dump_build_path_index, + ] + worker.utils.call(cmd) + + num_threads = worker.configure.get_thread_count(thread_memory_in_GB=1.25) + + in_path = builder.build_doc_path / "sphinx-in" + out_path = builder.build_doc_path / "sphinx-out-html" + worker.utils.call(["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path]) + + +def package(builder: DocApiBuilder) -> None: + os.chdir(builder.build_doc_path) + + version = worker.blender.version.VersionInfo(builder).short_version + version_file_label = version.replace(".", "_") + + package_name = f"blender_python_reference_{version_file_label}" + package_file_name = f"{package_name}.zip" + + cmd = ["7z", "a", "-tzip", package_file_name, "./sphinx-out-html", "-r"] + worker.utils.call(cmd) + + cmd = ["7z", "rn", package_file_name, "sphinx-out-html", package_name] + worker.utils.call(cmd) + + +def deliver(builder: DocApiBuilder) -> None: + # Get versions + branches_config = builder.get_branches_config() + version = worker.blender.version.VersionInfo(builder).short_version + dev_version = branches_config.track_major_minor_versions["vdev"] + latest_version = branches_config.doc_stable_major_minor_version + + # Get remote path + worker_config = builder.get_worker_config() + connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" + remote_path = ( + pathlib.Path(worker_config.docs_folder) + / "docs.blender.org" + / "htdocs" + / builder.service_env_id + / "api" + ) + + version_remote_path = remote_path / version + worker.utils.call_ssh(connect_id, ["mkdir", "-p", version_remote_path]) + + change_modes = ["D0755", "F0644"] + + # Sync HTML files + source_path = f"{builder.build_doc_path}/sphinx-out-html/" + dest_path = f"{connect_id}:{version_remote_path}/" + worker.utils.rsync( + source_path, dest_path, exclude_paths=[".doctrees"], change_modes=change_modes + ) + + # Put API dumps data on the server. 
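+    # api_dump_index.json lives next to the per-version folders, so the whole
+    # api_dump directory is synced one level above the version path.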
+ api_dump_build_path = f"{builder.build_doc_path}/api_dump/" + api_dump_dest_path = f"{connect_id}:{remote_path}/" + worker.utils.rsync(api_dump_build_path, api_dump_dest_path, change_modes=change_modes) + + # Sync zip package + if builder.needs_package_delivery: + version_file_label = version.replace(".", "_") + + package_name = f"blender_python_reference_{version_file_label}" + package_file_name = f"{package_name}.zip" + + source_file_path = builder.build_doc_path / package_file_name + dest_file_path = f"{connect_id}:{version_remote_path}/{package_file_name}" + worker.utils.rsync( + source_file_path, dest_file_path, exclude_paths=[".doctrees"], change_modes=change_modes + ) + + # Create links + if builder.track_id == "vdev": + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] + ) + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "master"] + ) + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"] + ) + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"] + ) + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "current"] + ) + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["configure-machine"] = worker.configure.configure_machine + steps["update-code"] = worker.blender.update.update + steps["compile-code"] = worker.blender.compile.compile_code + steps["compile-install"] = worker.blender.compile.compile_install + steps["compile"] = compile_doc + steps["package"] = package + steps["deliver"] = deliver + steps["clean"] = worker.blender.CodeBuilder.clean + + parser = worker.blender.create_argument_parser(steps=steps) + parser.add_argument("--needs-package-delivery", action="store_true", required=False) + + args = parser.parse_args() + builder = DocApiBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/doc_developer.py b/config/worker/doc_developer.py new file mode 100755 index 0000000..50fbd8f --- /dev/null +++ b/config/worker/doc_developer.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import os +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.utils + + +class DocDeveloperBuilder(worker.utils.Builder): + def __init__(self, args: argparse.Namespace): + super().__init__(args, "blender", "blender-developer-docs") + self.build_path = self.track_path / "build_developer_docs" + self.output_path = self.build_path / "html" + self.setup_track_path() + + +def update(builder: DocDeveloperBuilder) -> None: + builder.update_source() + + +def compile_doc(builder: DocDeveloperBuilder) -> None: + os.chdir(builder.track_path) + worker.utils.call_pipenv(["install", "--requirements", builder.code_path / "requirements.txt"]) + + worker.utils.remove_dir(builder.output_path) + + os.makedirs(builder.build_path, exist_ok=True) + os.chdir(builder.build_path) + + mkdocs_yml_path = builder.code_path / "mkdocs.yml" + worker.utils.call_pipenv( + ["run", "mkdocs", "build", "-f", mkdocs_yml_path, "-d", builder.output_path] + ) + + +def deliver(builder: DocDeveloperBuilder) -> None: + worker_config = builder.get_worker_config() + + remote_path = 
f"developer.blender.org/webroot/{builder.service_env_id}/docs" + + connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" + server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path(remote_path) + + change_modes = ["D0755", "F0644"] + source_path = f"{builder.output_path}/" + dest_path = f"{connect_id}:{server_docs_path}/" + + worker.utils.call_ssh(connect_id, ["mkdir", "-p", server_docs_path]) + worker.utils.rsync( + source_path, + dest_path, + change_modes=change_modes, + port=worker_config.docs_port, + delete=True, + delete_path_check=f"/developer.blender.org/webroot/{builder.service_env_id}/docs", + ) + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["update"] = update + steps["compile"] = compile_doc + steps["deliver"] = deliver + + parser = worker.utils.create_argument_parser(steps=steps) + parser.add_argument("--needs-package-delivery", action="store_true", required=False) + + args = parser.parse_args() + builder = DocDeveloperBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/doc_manual.py b/config/worker/doc_manual.py new file mode 100755 index 0000000..c15654a --- /dev/null +++ b/config/worker/doc_manual.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import json +import os +import pathlib +import re +import sys +import time + +from collections import OrderedDict +from datetime import timedelta +from typing import Optional, Sequence + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.configure +import worker.utils + + +class ManualBuilder(worker.utils.Builder): + def __init__(self, args: argparse.Namespace): + super().__init__(args, "blender", "blender-manual") + self.needs_all_locales = args.needs_all_locales + self.needs_package_delivery = args.needs_package_delivery + self.doc_format = args.doc_format + self.build_path = self.track_path / "build" + self.setup_track_path() + + def get_locales(self) -> Sequence[str]: + locales = ["en"] + if self.needs_all_locales: + locale_path = self.code_path / "locale" + locales += [ + item.name for item in locale_path.iterdir() if not item.name.startswith(".") + ] + return locales + + +def update(builder: ManualBuilder) -> None: + builder.update_source() + if builder.needs_all_locales: + worker.utils.update_source( + "blender", "blender-manual-translations", builder.code_path / "locale" + ) + + +def check(builder: ManualBuilder) -> None: + os.chdir(builder.track_path) + worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]) + + os.chdir(builder.code_path) + + make_cmd = "make.bat" if builder.platform == "windows" else "make" + worker.utils.call_pipenv(["run", make_cmd, "check_structure"]) + # worker.utils.call_pipenv(["run", make_cmd, "check_syntax"]) + # worker.utils.call_pipenv(["run", make_cmd, "check_spelling"]) + + +def compile_doc(builder: ManualBuilder) -> None: + # Install requirements. 
+    os.chdir(builder.track_path)
+    worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"])
+
+    # Determine format and locales
+    locales = builder.get_locales()
+    doc_format = builder.doc_format
+
+    # Clean build folder
+    worker.utils.remove_dir(builder.build_path)
+    os.makedirs(builder.build_path, exist_ok=True)
+    os.chdir(builder.code_path)
+
+    branches_config = builder.get_branches_config()
+
+    # Check manual version matches track.
+    conf_file_path = builder.code_path / "manual" / "conf.py"
+    conf_text = conf_file_path.read_text()
+    match = re.search(r"blender_version\s*=\s*['\"](.*)['\"]", conf_text)
+    expected_version = branches_config.track_major_minor_versions[builder.track_id]
+    found_version = match.group(1) if match else "nothing"
+    if found_version != expected_version:
+        raise Exception(
+            f"Expected blender_version {expected_version}, but found {found_version} in manual/conf.py"
+        )
+
+    def filter_output(line: str) -> Optional[str]:
+        if line.find("WARNING: unknown mimetype for .doctrees") != -1:
+            return None
+        elif line.find("copying images...") != -1:
+            return None
+        return line
+
+    # Generate manual
+    for locale in locales:
+        start_timestamp = time.time()
+        worker.utils.info(f"Generating {locale} in {doc_format}")
+
+        num_threads = worker.configure.get_thread_count(thread_memory_in_GB=1.25)
+
+        os.chdir(builder.code_path)
+        build_output_path = builder.build_path / doc_format / locale
+
+        worker.utils.call_pipenv(
+            [
+                "run",
+                "sphinx-build",
+                "-b",
+                doc_format,
+                "-j",
+                str(num_threads),
+                "-D",
+                f"language={locale}",
+                "./manual",
+                build_output_path,
+            ],
+            filter_output=filter_output,
+        )
+
+        if doc_format == "epub":
+            # rglob() returns a generator, which is always truthy; any() performs the actual check.
+            if not any(build_output_path.rglob("*.epub")):
+                raise Exception(f"Expected epub files missing in {build_output_path}")
+
+        # Hack appropriate versions.json URL into version_switch.js
+        worker.utils.info("Replacing URL in version_switch.js")
+
+        version_switch_file_path = build_output_path / "_static" / "js" / "version_switch.js"
+        versions_file_url = f"https://docs.blender.org/{builder.service_env_id}/versions.json"
+
+        version_switch_text = version_switch_file_path.read_text()
+        version_switch_text = version_switch_text.replace(
+            "https://docs.blender.org/versions.json", versions_file_url
+        )
+        version_switch_text = version_switch_text.replace(
+            "https://docs.blender.org/PROD/versions.json", versions_file_url
+        )
+        version_switch_text = version_switch_text.replace(
+            "https://docs.blender.org/UATEST/versions.json", versions_file_url
+        )
+        version_switch_file_path.write_text(version_switch_text)
+
+        time_total = time.time() - start_timestamp
+        time_delta = str(timedelta(seconds=time_total))
+        worker.utils.info(f"Generated {locale} in {doc_format} in {time_delta}")
+
+
+def package(builder: ManualBuilder) -> None:
+    if not builder.needs_package_delivery:
+        worker.utils.info("No package delivery needed, skipping packaging")
+        return
+
+    locales = builder.get_locales()
+    doc_format = builder.doc_format
+
+    os.chdir(builder.build_path)
+
+    compression_option = ""  # "-mx=9"
+    package_file_name = f"blender_manual_{doc_format}.zip"
+
+    build_package_path = builder.build_path / "package"
+
+    for locale in locales:
+        package_file_path = build_package_path / locale / package_file_name
+        worker.utils.remove_file(package_file_path)
+
+        source_path = f"{doc_format}/{locale}"
+
+        cmd = [
+            "7z",
+            "a",
+            "-tzip",
+            package_file_path,
+            source_path,
+            "-r",
+            "-xr!.doctrees",
+            compression_option,
+        ]
+
worker.utils.call(cmd) + + cmd = [ + "7z", + "rn", + package_file_path, + source_path, + f"blender_manual_{builder.track_id}_{locale}.{doc_format}", + ] + worker.utils.call(cmd) + + +def deliver(builder: ManualBuilder) -> None: + locales = builder.get_locales() + doc_format = builder.doc_format + + # Get versions + branches_config = builder.get_branches_config() + version = branches_config.track_major_minor_versions[builder.track_id] + dev_version = branches_config.track_major_minor_versions["vdev"] + latest_version = branches_config.doc_stable_major_minor_version + + # Get remote paths + worker_config = builder.get_worker_config() + connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" + docs_remote_path = ( + pathlib.Path(worker_config.docs_folder) + / "docs.blender.org" + / "htdocs" + / builder.service_env_id + ) + + # Sync each locale + for locale in locales: + worker.utils.info(f"Syncing {locale}") + + # Create directory + remote_path = docs_remote_path / "manual" / locale + version_remote_path = remote_path / version + worker.utils.call_ssh(connect_id, ["mkdir", "-p", version_remote_path]) + + if doc_format == "html": + # Sync html files + source_path = f"{builder.build_path}/{doc_format}/{locale}/" + dest_path = f"{connect_id}:{version_remote_path}/" + # Exclude packaged download files; these get synced with `needs_package_delivery`. + worker.utils.rsync( + source_path, + dest_path, + exclude_paths=[".doctrees", "blender_manual_*.zip"], + delete=True, + delete_path_check=str(version_remote_path) + ) + + # Create links + if builder.track_id == "vdev": + worker.utils.info(f"Creating links for {locale}") + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] + ) + worker.utils.call_ssh( + connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"] + ) + + if builder.needs_package_delivery: + # Sync zip package + worker.utils.info(f"Syncing package for {locale}") + build_package_path = builder.build_path / "package" + package_file_name = f"blender_manual_{doc_format}.zip" + source_path = build_package_path / locale / package_file_name + dest_path = f"{connect_id}:{version_remote_path}/{package_file_name}" + worker.utils.rsync(source_path, dest_path, exclude_paths=[".doctrees"]) + + # Create and sync versions.json + worker.utils.info("Creating and syncing versions.json") + + doc_version_labels = branches_config.doc_manual_version_labels + versions_path = builder.build_path / "versions.json" + versions_path.write_text(json.dumps(doc_version_labels, indent=2)) + worker.utils.info(versions_path.read_text()) + + dest_path = f"{connect_id}:{docs_remote_path}/versions.json" + worker.utils.rsync(versions_path, dest_path) + + +def clean(builder: ManualBuilder) -> None: + worker.utils.remove_dir(builder.build_path) + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["configure-machine"] = worker.configure.configure_machine + steps["update"] = update + steps["check"] = check + steps["compile"] = compile_doc + steps["package"] = package + steps["deliver"] = deliver + steps["clean"] = clean + + parser = worker.utils.create_argument_parser(steps=steps) + parser.add_argument("--needs-all-locales", action="store_true", required=False) + parser.add_argument("--needs-package-delivery", action="store_true", required=False) + parser.add_argument( + "--doc-format", default="html", type=str, required=False, choices=["html", "epub"] + ) + + args = parser.parse_args() + builder = 
ManualBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/doc_studio.py b/config/worker/doc_studio.py new file mode 100755 index 0000000..3b6104a --- /dev/null +++ b/config/worker/doc_studio.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +import argparse +import os +import pathlib +import sys + +from collections import OrderedDict + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +import worker.utils + + +class DocStudioBuilder(worker.utils.Builder): + def __init__(self, args: argparse.Namespace): + super().__init__(args, "studio", "blender-studio-tools") + self.setup_track_path() + + +def update(builder: worker.utils.Builder) -> None: + builder.update_source(update_submodules=True) + + +def compile_doc(builder: worker.utils.Builder) -> None: + docs_path = builder.code_path / "docs" + os.chdir(docs_path) + + worker.utils.call(["npm", "install"]) + worker.utils.call(["npm", "run", "docs:build"]) + + +def deliver(builder: worker.utils.Builder) -> None: + dry_run = False + if builder.service_env_id not in ("PROD", "LOCAL"): + worker.utils.warning("Delivery from non-PROD is dry run only") + dry_run = True + + worker_config = builder.get_worker_config() + connect_id = f"{worker_config.studio_user}@{worker_config.studio_machine}" + change_modes = ["D0755", "F0644"] + + if builder.service_env_id == "LOCAL" and builder.platform == "darwin": + worker.utils.warning("rsync change_owner not supported on darwin, ignoring for LOCAL") + change_owner = None + else: + change_owner = "buildbot:www-data" + + # Content of the website. + docs_local_path = builder.code_path / "docs" / ".vitepress" / "dist" + docs_remote_path = pathlib.Path(worker_config.studio_folder) + + docs_source_path = f"{docs_local_path}/" + docs_dest_path = f"{connect_id}:{docs_remote_path}/" + worker.utils.rsync( + docs_source_path, + docs_dest_path, + change_modes=change_modes, + change_owner=change_owner, + port=worker_config.studio_port, + dry_run=dry_run, + ) + + # Downloadable artifacts. + artifacts_local_path = builder.code_path / "dist" + artifacts_remote_path = docs_remote_path / "download" + if artifacts_local_path.exists(): + artifacts_source_path = f"{artifacts_local_path}/" + artifact_dest_path = f"{connect_id}:{artifacts_remote_path}/" + worker.utils.rsync( + artifacts_source_path, + artifact_dest_path, + change_modes=change_modes, + change_owner=change_owner, + port=worker_config.studio_port, + dry_run=dry_run, + ) + else: + worker.utils.info("No downloadable artifacts to be copied over") + + +if __name__ == "__main__": + steps: worker.utils.BuilderSteps = OrderedDict() + steps["update"] = update + steps["compile"] = compile_doc + steps["deliver"] = deliver + + parser = worker.utils.create_argument_parser(steps=steps) + parser.add_argument("--needs-package-delivery", action="store_true", required=False) + + args = parser.parse_args() + builder = DocStudioBuilder(args) + builder.run(args.step, steps) diff --git a/config/worker/utils.py b/config/worker/utils.py new file mode 100644 index 0000000..ba2e77f --- /dev/null +++ b/config/worker/utils.py @@ -0,0 +1,549 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# SPDX-FileCopyrightText: 2011-2024 Blender Authors +# + +## Utility functions used by all builders. 
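+## They cover logging with warning/error summaries, subprocess execution with
+## retries, rsync/ssh wrappers, file operations and git checkout management.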
+ +import argparse +import atexit +import logging +import os +import pathlib +import platform +import re +import shutil +import subprocess +import sys +import time + +from collections import OrderedDict +from typing import Any, Callable, Dict, List, Optional, Sequence, Union + +# Logging +_error_pattern = re.compile( + r"(^FATAL:|^ERROR:|^ERROR!|^Unhandled Error|^Traceback| error: | error | Error |FAILED: |ninja: build stopped: subcommand failed|CMake Error|SEGFAULT|Exception: SegFault |The following tests FAILED:|\*\*\*Failed|\*\*\*Exception|\*\*\*Abort|^fatal:)" +) +_warning_pattern = re.compile( + r"(^WARNING:|^WARNING!|^WARN |Warning: | warning: | warning |warning | nvcc warning :|CMake Warning)" +) +_ignore_pattern = re.compile( + r"(SignTool Error: CryptSIPRemoveSignedDataMsg returned error: 0x00000057|unknown mimetype for .*doctree)" +) + +_errors: List[str] = [] +_warnings: List[str] = [] + + +def _print_warning(msg: str) -> None: + print("\033[33m" + msg + "\033[0m", flush=True) + + +def _print_error(msg: str) -> None: + print("\033[31m" + msg + "\033[0m", flush=True) + + +def _print_cmd(msg: str) -> None: + print("\033[32m" + msg + "\033[0m", flush=True) + + +def _exit_handler() -> None: + if len(_warnings): + print("") + print("=" * 80) + print("WARNING Summary:") + print("=" * 80) + for msg in _warnings: + _print_warning(msg) + if len(_errors): + print("") + print("=" * 80) + print("ERROR Summary:") + print("=" * 80) + for msg in _errors: + _print_error(msg) + + +atexit.register(_exit_handler) + + +def info(msg: str) -> None: + print("INFO: " + msg, flush=True) + + +def warning(msg: str) -> None: + _print_warning("WARN: " + msg) + global _warnings + _warnings += [msg] + + +def error(msg: str) -> None: + _print_error("ERROR: " + msg) + global _errors + _errors += [msg] + + +def exception(e: BaseException) -> None: + logging.exception(e) + global _errors + _errors += [str(e)] + + +def _log_cmd(msg: str) -> None: + if re.search(_error_pattern, msg): + if not re.search(_ignore_pattern, msg): + _print_error(msg) + global _errors + _errors += [msg] + return + elif re.search(_warning_pattern, msg): + if not re.search(_ignore_pattern, msg): + _print_warning(msg) + global _warnings + _warnings += [msg] + return + + print(msg.encode('ascii', errors='replace').decode('ascii'), flush=True) + + +# Command execution +class HiddenArgument: + def __init__(self, value: Union[str, pathlib.Path]): + self.value = value + + +CmdArgument = Union[str, pathlib.Path, HiddenArgument, Any] +CmdList = List[CmdArgument] +CmdSequence = Sequence[CmdArgument] +CmdFilterOutput = Optional[Callable[[str], Optional[str]]] +CmdEnvironment = Optional[Dict[str, str]] + + +def _prepare_call(cmd: CmdSequence, dry_run: bool = False) -> Sequence[Union[str, pathlib.Path]]: + real_cmd: List[Union[str, pathlib.Path]] = [] + log_cmd: List[str] = [] + + for arg in cmd: + if isinstance(arg, HiddenArgument): + real_cmd += [arg.value] + else: + log_cmd += [str(arg)] + real_cmd += [arg] + + if dry_run: + info(f"Dry run command in path [{os.getcwd()}]") + else: + info(f"Run command in path [{os.getcwd()}]") + _print_cmd(" ".join(log_cmd)) + + return real_cmd + + +def call( + cmd: CmdSequence, + env: CmdEnvironment = None, + exit_on_error: bool = True, + filter_output: CmdFilterOutput = None, + retry_count: int = 0, + retry_wait_time: float = 1.0, + dry_run: bool = False, +) -> int: + cmd = _prepare_call(cmd, dry_run) + if dry_run: + return 0 + + for try_count in range(0, retry_count + 1): + # Flush to ensure correct order output 
on Windows.
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+        proc = subprocess.Popen(
+            cmd,
+            env=env,
+            bufsize=1,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            universal_newlines=True,
+            encoding="utf-8",
+            errors="ignore",
+        )
+        while True:
+            if not proc.stdout:
+                break
+
+            line = proc.stdout.readline()
+            if line:
+                line_str = line.strip("\n\r")
+                if filter_output:
+                    line_str_filter = filter_output(line_str)
+                else:
+                    line_str_filter = line_str
+                # Log only lines that survive the filter.
+                if line_str_filter:
+                    _log_cmd(line_str_filter)
+            else:
+                break
+
+        proc.communicate()
+
+        if proc.returncode == 0:
+            return 0
+
+        if try_count == retry_count:
+            if exit_on_error:
+                sys.exit(proc.returncode)
+            return proc.returncode
+        else:
+            warning("Command failed, retrying")
+            time.sleep(retry_wait_time)
+
+    return -1
+
+
+def check_output(cmd: CmdSequence, exit_on_error: bool = True) -> str:
+    cmd = _prepare_call(cmd)
+
+    # Flush to ensure correct order output on Windows.
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+    try:
+        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
+    except subprocess.CalledProcessError as e:
+        if exit_on_error:
+            sys.exit(e.returncode)
+        output = ""
+
+    return output.strip()
+
+
+def call_pipenv(
+    cmd: CmdSequence, filter_output: CmdFilterOutput = None, dry_run: bool = False
+) -> int:
+    cmd_prefix: CmdList = ["pipenv"]
+    return call(cmd_prefix + list(cmd), filter_output=filter_output, dry_run=dry_run)
+
+
+def call_ssh(connect_id: str, cmd: CmdSequence, dry_run: bool = False) -> int:
+    ssh_cmd = [
+        "ssh",
+        "-o",
+        "ConnectTimeout=20",
+        HiddenArgument(connect_id),
+        " ".join([str(arg) for arg in cmd]),
+    ]
+    return call(ssh_cmd, retry_count=3, dry_run=dry_run)
+
+
+def rsync(
+    source_path: Union[pathlib.Path, str],
+    dest_path: Union[pathlib.Path, str],
+    exclude_paths: Sequence[str] = [],
+    include_paths: Sequence[str] = [],
+    change_modes: Sequence[str] = [],
+    change_owner: Optional[str] = None,
+    show_names: bool = False,
+    delete: bool = False,
+    delete_path_check: Optional[str] = None,
+    dry_run: bool = False,
+    port: int = 22,
+    retry_count: int = 3,
+) -> int:
+    # Extra check on path, because delete is risky if pointed at a
+    # root folder that contains other data.
+    if delete:
+        if not delete_path_check:
+            raise Exception("Rsync: delete requires delete_path_check")
+        if str(dest_path).find(delete_path_check) == -1:
+            raise Exception(f"Rsync: remote path must contain '{delete_path_check}'")
+
+    info_options = "progress0,flist0,name0,stats2"
+    if show_names:
+        info_options = "progress0,flist0,name1,stats2"
+
+    cmd: List[Union[str, pathlib.Path, HiddenArgument]] = [
+        "rsync",
+        # SSH options
+        "-e",
+        f"ssh -o ConnectTimeout=20 -p {port}",
+        # The -rlpgoDv options below are equivalent to --archive apart from updating
+        # the timestamps of the files on the receiving side. This should prevent them
+        # from getting marked for ZFS snapshots.
+ "--timeout=60", + "--checksum", + "-rlpgoDv", + "--partial", + ] + if change_owner: + cmd += [f"--chown={change_owner}"] + if delete: + cmd += ["--delete"] + # cmd += [f"--info={info_options}"] + cmd += [f"--include={item}" for item in include_paths] + cmd += [f"--exclude={item}" for item in exclude_paths] + cmd += [f"--chmod={item}" for item in change_modes] + + cmd += [source_path] + cmd += [HiddenArgument(dest_path)] + + return call(cmd, retry_count=retry_count, dry_run=dry_run) + + +def move(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: + if dry_run: + return + # str() works around typing bug in Python 3.6. + shutil.move(str(path_from), path_to) + + +def copy_dir(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: + if dry_run: + return + shutil.copytree(path_from, path_to) + + +def copy_file(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: + if dry_run: + return + shutil.copy2(path_from, path_to) + + +def remove_file( + path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False +) -> None: + if not path.exists(): + return + if dry_run: + info(f"Removing {path} (dry run)") + return + + info(f"Removing {path}") + for try_count in range(0, retry_count): + try: + try: + if path.exists(): + path.unlink() + except FileNotFoundError: + pass + return + except: + time.sleep(retry_wait_time) + + # Not using missing_ok yet for Python3.6 compatibility. + try: + if path.exists(): + path.unlink() + except FileNotFoundError: + pass + + +# Retry several times by default, giving it a chance for possible antivirus to release +# a lock on files in the build folder. Happened for example with MSI files on Windows. +def remove_dir( + path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False +) -> None: + if not path.exists(): + return + if dry_run: + info(f"Removing {path} (dry run)") + return + + info(f"Removing {path}") + for try_count in range(0, retry_count): + try: + if path.exists(): + shutil.rmtree(path) + return + except: + if platform.system().lower() == "windwos": + # XXX: Windows builder debug. + # Often the `build_package` is failed to be removed because + # of the "Access Denied" error on blender-windows64.msi. + # Run some investigation commands to see what is going on. + if path.name == "build_package": + info("Removal of package artifacts folder failed. Investigating...") + msi_path = ( + path / "_CPack_Packages" / "Windows" / "WIX" / "blender-windows64.msi" + ) + if msi_path.exists(): + info(f"Information about [{msi_path}]") + call(["handle64", msi_path], exit_on_error=False) + call( + ["pwsh", "-command", f"Get-Item {msi_path} | Format-List"], + exit_on_error=False, + ) + call( + ["pwsh", "-command", f"Get-Acl {msi_path} | Format-List"], + exit_on_error=False, + ) + else: + info(f"MSI package file [{msi_path}] does not exist") + + time.sleep(retry_wait_time) + + if path.exists(): + shutil.rmtree(path) + + +def is_tool(name: Union[str, pathlib.Path]) -> bool: + """Check whether `name` is on PATH and marked as executable.""" + return shutil.which(name) is not None + + +# Update source code from git repository. 
+def update_source(
+    app_org: str,
+    app_id: str,
+    code_path: pathlib.Path,
+    branch_id: str = "main",
+    patch_id: Optional[str] = None,
+    commit_id: Optional[str] = None,
+    update_submodules: bool = False,
+) -> None:
+    repo_url = f"https://projects.blender.org/{app_org}/{app_id}.git"
+
+    if not code_path.exists():
+        # Clone new
+        info(f"Cloning {repo_url}")
+        call(["git", "clone", "--progress", repo_url, code_path])
+    else:
+        for index_lock_path in code_path.rglob(".git/index.lock"):
+            warning("Removing git lock, probably left behind by killed git process")
+            remove_file(index_lock_path)
+        for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"):
+            warning("Removing submodule git lock, probably left behind by killed git process")
+            remove_file(index_lock_path)
+
+    os.chdir(code_path)
+
+    # Fix error: "fatal: bad object refs/remotes/origin/HEAD"
+    call(["git", "remote", "set-head", "origin", "--auto"])
+
+    # Change to new Gitea URL.
+    call(["git", "remote", "set-url", "origin", repo_url])
+    call(["git", "submodule", "sync"])
+
+    # Fetch and clean
+    call(["git", "fetch", "origin", "--prune"])
+    call(["git", "clean", "-f", "-d"])
+    call(["git", "reset", "--hard"])
+
+    rebase_merge_path = code_path / ".git" / "rebase-merge"
+    if rebase_merge_path.exists():
+        info(f"Path {rebase_merge_path} exists, removing!")
+        shutil.rmtree(rebase_merge_path)
+
+    if patch_id:
+        # Pull request.
+        pull_request_id = patch_id
+        branch_name = f"PR{pull_request_id}"
+
+        # Checkout pull request into PR123 branch.
+        call(["git", "checkout", "main"])
+        call(["git", "fetch", "-f", "origin", f"pull/{pull_request_id}/head:{branch_name}"])
+        call(["git", "checkout", branch_name])
+
+        if commit_id and (commit_id != "HEAD"):
+            call(["git", "reset", "--hard", commit_id])
+    else:
+        # Branch.
+        call(["git", "checkout", branch_id])
+
+        if commit_id and (commit_id != "HEAD"):
+            call(["git", "reset", "--hard", commit_id])
+        else:
+            call(["git", "reset", "--hard", "origin/" + branch_id])
+
+    if update_submodules:
+        call(["git", "submodule", "init"])
+
+    # Resolve potential issues with submodules even if other code
+    # is responsible for updating them.
+    call(["git", "submodule", "foreach", "git", "clean", "-f", "-d"])
+    call(["git", "submodule", "foreach", "git", "reset", "--hard"])
+
+    if update_submodules:
+        call(["git", "submodule", "update"])
+
+
+# Workaround for missing type info in Python 3.8 and earlier.
+if sys.version_info >= (3, 9):
+    BuilderSteps = OrderedDict[str, Callable[[Any], None]]
+else:
+    BuilderSteps = Any
+
+
+class Builder:
+    def __init__(self, args: argparse.Namespace, app_org: str, app_id: str):
+        self.service_env_id = args.service_env_id
+        self.track_id = args.track_id
+        self.branch_id = args.branch_id
+        self.patch_id = args.patch_id
+        self.commit_id = args.commit_id
+        self.platform = platform.system().lower()
+        self.architecture = platform.machine().lower()
+        self.app_org = app_org
+        self.app_id = app_id
+
+        if not self.branch_id:
+            branches_config = self.get_branches_config()
+            self.branch_id = branches_config.track_code_branches[self.track_id]
+
+        self.tracks_root_path = self.get_worker_config().tracks_root_path
+        self.track_path = self.tracks_root_path / (self.app_id + "-" + self.track_id)
+        self.code_path = self.track_path / (self.app_id + ".git")
+
+        info(f"Setting up builder paths from [{self.track_path}]")
+
+    def setup_track_path(self) -> None:
+        # Create the track directory if it doesn't exist already.
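+        # The track path doubles as the pipenv working directory, hence the
+        # cleanup of stale Pipfiles below.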
+ os.makedirs(self.track_path, exist_ok=True) + os.chdir(self.track_path) + + # Clean up any existing pipenv files. + remove_file(self.track_path / "Pipfile") + remove_file(self.track_path / "Pipfile.lock") + remove_file(self.code_path / "Pipfile") + remove_file(self.code_path / "Pipfile.lock") + + def update_source(self, update_submodules: bool = False) -> None: + update_source( + self.app_org, + self.app_id, + self.code_path, + branch_id=self.branch_id, + patch_id=self.patch_id, + commit_id=self.commit_id, + update_submodules=update_submodules, + ) + + def run(self, step: str, steps: BuilderSteps) -> None: + try: + if step == "all": + for func in steps.values(): + func(self) + else: + steps[step](self) + except Exception as e: + exception(e) + sys.exit(1) + + def get_worker_config(self) -> Any: + import conf.worker + + return conf.worker.get_config(self.service_env_id) + + def get_branches_config(self) -> Any: + import conf.branches + + return conf.branches + + +def create_argument_parser(steps: BuilderSteps) -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + parser.add_argument("--service-env-id", type=str, required=False, default="LOCAL") + parser.add_argument("--track-id", default="vdev", type=str, required=False) + parser.add_argument("--branch-id", default="", type=str, required=False) + parser.add_argument("--patch-id", default="", type=str, required=False) + parser.add_argument("--commit-id", default="", type=str, required=False) + all_steps = list(steps.keys()) + ["all"] + parser.add_argument("step", choices=all_steps) + return parser -- 2.45.2 From d6bce1b39d50c3cca97b9c2a7c63537ca620f953 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 19 Nov 2024 21:59:53 +0100 Subject: [PATCH 06/13] Formatting and linting fixes --- config/conf/auth.py | 32 ++++-- config/gitea/blender.py | 4 +- config/gitea/reporter.py | 29 ++++-- config/pipeline/code.py | 154 +++++++++++++++++++--------- config/pipeline/code_store.py | 14 ++- config/pipeline/common.py | 17 ++- config/pipeline/doc_manual.py | 5 +- config/worker/archive.py | 51 ++++++--- config/worker/blender/__init__.py | 36 +++++-- config/worker/blender/benchmark.py | 1 - config/worker/blender/bundle_dmg.py | 42 ++++++-- config/worker/blender/compile.py | 71 +++++++++---- config/worker/blender/lint.py | 2 +- config/worker/blender/pack.py | 50 ++++++--- config/worker/blender/sign.py | 24 +++-- config/worker/blender/test.py | 4 +- config/worker/blender/update.py | 8 +- config/worker/blender/version.py | 18 +++- config/worker/code_store.py | 4 +- config/worker/configure.py | 21 ++-- config/worker/deploy/artifacts.py | 65 ++++++++---- config/worker/deploy/monitor.py | 14 ++- config/worker/deploy/pypi.py | 8 +- config/worker/deploy/snap.py | 30 ++++-- config/worker/deploy/source.py | 4 +- config/worker/deploy/steam.py | 25 +++-- config/worker/deploy/windows.py | 12 ++- config/worker/doc_api.py | 35 +++++-- config/worker/doc_developer.py | 8 +- config/worker/doc_manual.py | 39 +++++-- config/worker/doc_studio.py | 4 +- config/worker/utils.py | 112 +++++++++++++------- 32 files changed, 675 insertions(+), 268 deletions(-) diff --git a/config/conf/auth.py b/config/conf/auth.py index 93efe66..8d67280 100644 --- a/config/conf/auth.py +++ b/config/conf/auth.py @@ -32,12 +32,18 @@ def fetch_authorization(devops_env_id: str): deploy_dev_usernames = auth_config.deploy_dev_usernames trusted_dev_usernames = auth_config.trusted_dev_usernames - dev_usernames = list(set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)) 
+ dev_usernames = list( + set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames) + ) deploy_usernames = list(set(deploy_dev_usernames + admin_usernames)) file_based_group_username_role_matchers = [ - buildbot.plugins.util.RolesFromUsername(roles=["admin"], usernames=admin_usernames), - buildbot.plugins.util.RolesFromUsername(roles=["deploy"], usernames=deploy_usernames), + buildbot.plugins.util.RolesFromUsername( + roles=["admin"], usernames=admin_usernames + ), + buildbot.plugins.util.RolesFromUsername( + roles=["deploy"], usernames=deploy_usernames + ), buildbot.plugins.util.RolesFromUsername(roles=["dev"], usernames=dev_usernames), ] @@ -52,9 +58,15 @@ def fetch_authorization(devops_env_id: str): # buildbot.plugins.util.AnyEndpointMatcher(role='dev', defaultDeny=False), # buildbot.plugins.util.AnyEndpointMatcher(role='coordinator', defaultDeny=False), # buildbot.plugins.util.AnyEndpointMatcher(role='anonymous', defaultDeny=False), - buildbot.plugins.util.StopBuildEndpointMatcher(role="dev", defaultDeny=True), - buildbot.plugins.util.RebuildBuildEndpointMatcher(role="dev", defaultDeny=True), - buildbot.plugins.util.EnableSchedulerEndpointMatcher(role="admin", defaultDeny=True), + buildbot.plugins.util.StopBuildEndpointMatcher( + role="dev", defaultDeny=True + ), + buildbot.plugins.util.RebuildBuildEndpointMatcher( + role="dev", defaultDeny=True + ), + buildbot.plugins.util.EnableSchedulerEndpointMatcher( + role="admin", defaultDeny=True + ), # buildbot.plugins.util.AnyEndpointMatcher(role='any', defaultDeny=False), # Force roles buildbot.plugins.util.ForceBuildEndpointMatcher( @@ -95,10 +107,14 @@ def fetch_authorization(devops_env_id: str): builder="*-doc-*", role="dev", defaultDeny=True ), # This also affects starting jobs via force scheduler - buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True), + buildbot.plugins.util.AnyControlEndpointMatcher( + role="admin", defaultDeny=True + ), # A default deny for any endpoint if not admin # If this is missing at the end, any UNMATCHED group will get 'allow'... 
- buildbot.plugins.util.AnyControlEndpointMatcher(role="admin", defaultDeny=True), + buildbot.plugins.util.AnyControlEndpointMatcher( + role="admin", defaultDeny=True + ), ], roleMatchers=file_based_group_username_role_matchers, ) diff --git a/config/gitea/blender.py b/config/gitea/blender.py index 8f53811..4bd27c1 100644 --- a/config/gitea/blender.py +++ b/config/gitea/blender.py @@ -31,7 +31,9 @@ def setup_service(devops_env_id: str): if gitea_api_token: log.msg("Found Gitea API token, enabling status push") - return gitea.reporter.GiteaStatusService11(gitea_url, gitea_api_token, verbose=False) + return gitea.reporter.GiteaStatusService11( + gitea_url, gitea_api_token, verbose=False + ) else: log.msg("No Gitea API token found, status push disabled") return None diff --git a/config/gitea/reporter.py b/config/gitea/reporter.py index 1e1f610..cbcdbb0 100644 --- a/config/gitea/reporter.py +++ b/config/gitea/reporter.py @@ -81,7 +81,9 @@ class GiteaStatusService11(http.ReporterBase): yield super().reconfigService(generators=generators, **kwargs) self.context = context or Interpolate("buildbot/%(prop:buildername)s") - self.context_pr = context_pr or Interpolate("buildbot/pull_request/%(prop:buildername)s") + self.context_pr = context_pr or Interpolate( + "buildbot/pull_request/%(prop:buildername)s" + ) if baseURL.endswith("/"): baseURL = baseURL[:-1] self.baseURL = baseURL @@ -107,7 +109,14 @@ class GiteaStatusService11(http.ReporterBase): ] def createStatus( - self, project_owner, repo_name, sha, state, target_url=None, description=None, context=None + self, + project_owner, + repo_name, + sha, + state, + target_url=None, + description=None, + context=None, ): """ :param project_owner: username of the owning user or organization @@ -164,19 +173,25 @@ class GiteaStatusService11(http.ReporterBase): log.msg( 'Could not send status "{state}" for ' "{repo} at {sha}: {code} : {message}".format( - state=state, repo=repository_name, sha=sha, code=res.code, message=message + state=state, + repo=repository_name, + sha=sha, + code=res.code, + message=message, ) ) elif self.verbose: log.msg( - 'Status "{state}" sent for ' - "{repo} at {sha}.".format(state=state, repo=repository_name, sha=sha) + 'Status "{state}" sent for ' "{repo} at {sha}.".format( + state=state, repo=repository_name, sha=sha + ) ) except Exception as e: log.err( e, - 'Failed to send status "{state}" for ' - "{repo} at {sha}".format(state=state, repo=repository_name, sha=sha), + 'Failed to send status "{state}" for ' "{repo} at {sha}".format( + state=state, repo=repository_name, sha=sha + ), ) @defer.inlineCallbacks diff --git a/config/pipeline/code.py b/config/pipeline/code.py index 69d0ef2..9f42b5b 100644 --- a/config/pipeline/code.py +++ b/config/pipeline/code.py @@ -92,7 +92,9 @@ code_python_module_skip_test_names = ["sign-code-binaries"] code_tracked_branch_ids = conf.branches.code_tracked_branch_ids code_track_ids = list(code_tracked_branch_ids.keys()) code_all_platform_architectures = conf.branches.code_all_platform_architectures -code_official_platform_architectures = conf.branches.code_official_platform_architectures +code_official_platform_architectures = ( + conf.branches.code_official_platform_architectures +) code_track_pipeline_types = {} track_properties = {} @@ -211,7 +213,12 @@ scheduler_properties_patch = [ default=False, ), buildbot.plugins.util.StringParameter( - name="pull_revision", label="Pull Revision:", required=False, hide=True, size=80, default="" + name="pull_revision", + label="Pull Revision:", + 
required=False, + hide=True, + size=80, + default="", ), ] @@ -225,12 +232,20 @@ scheduler_properties = { @buildbot.plugins.util.renderer -def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_type, step_name): +def create_code_worker_command_args( + props, devops_env_id, track_id, pipeline_type, step_name +): commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD") patch_id = pipeline.common.fetch_property(props, key="patch_id", default="") - override_branch_id = pipeline.common.fetch_property(props, key="override_branch_id", default="") - python_module = pipeline.common.fetch_property(props, key="python_module", default=False) - needs_gpu_tests = pipeline.common.fetch_property(props, key="needs_gpu_tests", default=False) + override_branch_id = pipeline.common.fetch_property( + props, key="override_branch_id", default="" + ) + python_module = pipeline.common.fetch_property( + props, key="python_module", default=False + ) + needs_gpu_tests = pipeline.common.fetch_property( + props, key="needs_gpu_tests", default=False + ) needs_gpu_binaries = pipeline.common.fetch_property( props, key="needs_gpu_binaries", default=False ) @@ -279,11 +294,12 @@ def create_code_worker_command_args(props, devops_env_id, track_id, pipeline_typ args += [step_name] - return pipeline.common.create_worker_command("code.py", devops_env_id, track_id, args) + return pipeline.common.create_worker_command( + "code.py", devops_env_id, track_id, args + ) def needs_do_code_pipeline_step(step): - build = step.build # Use this to test master steps only, otherwise we be waiting for 30 minutes needs_master_steps_only = False @@ -291,9 +307,7 @@ def needs_do_code_pipeline_step(step): is_master_step = step.name in pipeline.common.code_pipeline_master_step_names return is_master_step - worker = step.worker - worker_name = step.getWorkerName() - worker_system = worker.worker_system + step.getWorkerName() is_package_delivery_step = (step.name in code_delivery_step_names) or ( step.name in pipeline.common.code_pipeline_master_step_names @@ -337,7 +351,9 @@ class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type): file_size_in_mb = 500 * 1024 * 1024 - worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + worker_source_path = pathlib.Path( + f"../../../../git/blender-{track_id}/build_package" + ) master_dest_path = pathlib.Path( f"{worker_config.buildbot_download_folder}/{pipeline_type}" ).expanduser() @@ -359,7 +375,9 @@ def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type): def create_deliver_test_results_step(worker_config, track_id, pipeline_type): file_size_in_mb = 500 * 1024 * 1024 - worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + worker_source_path = pathlib.Path( + f"../../../../git/blender-{track_id}/build_package" + ) master_dest_path = pathlib.Path( f"{worker_config.buildbot_download_folder}/{pipeline_type}" ).expanduser() @@ -456,9 +474,13 @@ def populate(devops_env_id): print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps") for step_name in step_names: if step_name == "deliver-code-binaries": - step = create_deliver_code_binaries_step(worker_config, track_id, pipeline_type) + step = create_deliver_code_binaries_step( + worker_config, track_id, pipeline_type + ) elif step_name == "deliver-test-results": - step = create_deliver_test_results_step(worker_config, track_id, 
pipeline_type) + step = create_deliver_test_results_step( + worker_config, track_id, pipeline_type + ) else: needs_halt_on_failure = True if step_name in code_pipeline_test_step_names: @@ -488,8 +510,14 @@ def populate(devops_env_id): pipeline_build_factory.addStep(step) for master_step_name in pipeline.common.code_pipeline_master_step_names: - master_step_command = pipeline.common.create_master_command_args.withArgs( - devops_env_id, track_id, pipeline_type, master_step_name, single_platform=True + master_step_command = ( + pipeline.common.create_master_command_args.withArgs( + devops_env_id, + track_id, + pipeline_type, + master_step_name, + single_platform=True, + ) ) # Master to archive and purge builds @@ -528,7 +556,9 @@ def populate(devops_env_id): # Create builders. for platform_architecture in code_all_platform_architectures[track_id]: - print(f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders") + print( + f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders" + ) worker_group_id = f"{platform_architecture}-code" worker_group_id_gpu = f"{platform_architecture}-code-gpu" @@ -544,30 +574,36 @@ def populate(devops_env_id): # Assigning different workers for different tracks, specifically Linux builders. suitable_pipeline_worker_names = pipeline_worker_names - if platform_architecture == "linux-x86_64" and devops_env_id != "LOCAL": + if ( + platform_architecture == "linux-x86_64" + and devops_env_id != "LOCAL" + ): selector = "rocky" suitable_pipeline_worker_names = [ - worker for worker in pipeline_worker_names if selector in worker + worker + for worker in pipeline_worker_names + if selector in worker ] builders += [ buildbot.plugins.util.BuilderConfig( name=pipeline_builder_name, workernames=suitable_pipeline_worker_names, - nextWorker=partial(next_worker_code, pipeline_worker_names_gpu), + nextWorker=partial( + next_worker_code, pipeline_worker_names_gpu + ), tags=pipeline_builder_tags, factory=pipeline_build_factory, ) ] - pipeline_scheduler_name = ( - f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable" - ) + pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable" triggerable_scheduler_names += [pipeline_scheduler_name] schedulers += [ plugins_schedulers.Triggerable( - name=pipeline_scheduler_name, builderNames=[pipeline_builder_name] + name=pipeline_scheduler_name, + builderNames=[pipeline_builder_name], ) ] @@ -590,12 +626,15 @@ def populate(devops_env_id): ) ] - pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-lint-triggerable" + pipeline_scheduler_name = ( + f"{track_id}-code-{pipeline_type}-lint-triggerable" + ) triggerable_scheduler_names += [pipeline_scheduler_name] schedulers += [ plugins_schedulers.Triggerable( - name=pipeline_scheduler_name, builderNames=[pipeline_builder_name] + name=pipeline_scheduler_name, + builderNames=[pipeline_builder_name], ) ] @@ -603,39 +642,55 @@ def populate(devops_env_id): if triggerable_scheduler_names: trigger_properties = { "python_module": buildbot.plugins.util.Property("python_module"), - "needs_full_clean": buildbot.plugins.util.Property("needs_full_clean"), + "needs_full_clean": buildbot.plugins.util.Property( + "needs_full_clean" + ), "needs_package_delivery": buildbot.plugins.util.Property( "needs_package_delivery" ), - "needs_gpu_binaries": buildbot.plugins.util.Property("needs_gpu_binaries"), - "needs_gpu_tests": buildbot.plugins.util.Property("needs_gpu_tests"), - "needs_skip_tests": 
buildbot.plugins.util.Property("needs_skip_tests"), + "needs_gpu_binaries": buildbot.plugins.util.Property( + "needs_gpu_binaries" + ), + "needs_gpu_tests": buildbot.plugins.util.Property( + "needs_gpu_tests" + ), + "needs_skip_tests": buildbot.plugins.util.Property( + "needs_skip_tests" + ), "platform_architectures": buildbot.plugins.util.Property( "platform_architectures" ), } if pipeline_type == "patch": - trigger_properties["patch_id"] = buildbot.plugins.util.Property("patch_id") - trigger_properties["revision"] = buildbot.plugins.util.Property("revision") - trigger_properties["build_configuration"] = buildbot.plugins.util.Property( - "build_configuration" + trigger_properties["patch_id"] = buildbot.plugins.util.Property( + "patch_id" + ) + trigger_properties["revision"] = buildbot.plugins.util.Property( + "revision" + ) + trigger_properties["build_configuration"] = ( + buildbot.plugins.util.Property("build_configuration") ) trigger_factory.addStep( plugins_steps.SetProperties( - name="get-revision", properties=gitea.blender.get_patch_revision + name="get-revision", + properties=gitea.blender.get_patch_revision, ) ) elif pipeline_type == "experimental": - trigger_properties["override_branch_id"] = buildbot.plugins.util.Property( - "override_branch_id" + trigger_properties["override_branch_id"] = ( + buildbot.plugins.util.Property("override_branch_id") ) - trigger_properties["revision"] = buildbot.plugins.util.Property("revision") - trigger_properties["build_configuration"] = buildbot.plugins.util.Property( - "build_configuration" + trigger_properties["revision"] = buildbot.plugins.util.Property( + "revision" + ) + trigger_properties["build_configuration"] = ( + buildbot.plugins.util.Property("build_configuration") ) trigger_factory.addStep( plugins_steps.SetProperties( - name="get-revision", properties=gitea.blender.get_branch_revision + name="get-revision", + properties=gitea.blender.get_branch_revision, ) ) @@ -650,7 +705,9 @@ def populate(devops_env_id): ) ) - coordinator_builder_name = f"{track_id}-code-{pipeline_type}-coordinator" + coordinator_builder_name = ( + f"{track_id}-code-{pipeline_type}-coordinator" + ) builder_tags = coordinator_builder_name.split("-") builders += [ @@ -662,7 +719,9 @@ def populate(devops_env_id): ) ] - coordinator_scheduler_name = f"{track_id}-code-{pipeline_type}-coordinator-force" + coordinator_scheduler_name = ( + f"{track_id}-code-{pipeline_type}-coordinator-force" + ) schedulers += [ plugins_schedulers.ForceScheduler( name=coordinator_scheduler_name, @@ -701,7 +760,8 @@ def populate(devops_env_id): } change_filter = buildbot.plugins.util.ChangeFilter( - project=["blender.git"], branch=code_tracked_branch_ids[track_id] + project=["blender.git"], + branch=code_tracked_branch_ids[track_id], ) schedulers += [ plugins_schedulers.SingleBranchScheduler( @@ -724,7 +784,9 @@ def populate(devops_env_id): "needs_package_delivery": True, "needs_gpu_binaries": True, "build_configuration": "release", - "platform_architectures": code_all_platform_architectures[track_id], + "platform_architectures": code_all_platform_architectures[ + track_id + ], } nightly_codebases = { "blender.git": { diff --git a/config/pipeline/code_store.py b/config/pipeline/code_store.py index 35aaf09..64df456 100644 --- a/config/pipeline/code_store.py +++ b/config/pipeline/code_store.py @@ -41,7 +41,9 @@ scheduler_properties = [ def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type): # Create step for uploading msix to download.blender.org. 
file_size_in_mb = 500 * 1024 * 1024 - worker_source_path = pathlib.Path(f"../../../../git/blender-{track_id}/build_package") + worker_source_path = pathlib.Path( + f"../../../../git/blender-{track_id}/build_package" + ) master_dest_path = pathlib.Path( f"{worker_config.buildbot_download_folder}/{pipeline_type}" ).expanduser() @@ -122,8 +124,14 @@ def populate(devops_env_id): pipeline_build_factory.addStep(step) for master_step_name in pipeline.common.code_pipeline_master_step_names: - master_step_command = pipeline.common.create_master_command_args.withArgs( - devops_env_id, track_id, pipeline_type, master_step_name, single_platform=False + master_step_command = ( + pipeline.common.create_master_command_args.withArgs( + devops_env_id, + track_id, + pipeline_type, + master_step_name, + single_platform=False, + ) ) # Master to archive and purge builds diff --git a/config/pipeline/common.py b/config/pipeline/common.py index dfd7124..14bb9fc 100644 --- a/config/pipeline/common.py +++ b/config/pipeline/common.py @@ -81,9 +81,10 @@ def create_worker_command(script, devops_env_id, track_id, args): def create_master_command_args( props, devops_env_id, track_id, pipeline_type, step_name, single_platform ): - build_configuration = fetch_property(props, key="build_configuration", default="release") + build_configuration = fetch_property( + props, key="build_configuration", default="release" + ) python_module = fetch_property(props, key="python_module", default=False) - python_module_string = "true" if python_module else "false" args = [ "--pipeline-type", @@ -181,8 +182,12 @@ def create_pipeline( platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) local_worker_names = conf.machines.fetch_local_worker_names() - needs_incremental_schedulers = incremental_properties is not None and devops_env_id in ["PROD"] - needs_nightly_schedulers = nightly_properties is not None and devops_env_id in ["PROD"] + needs_incremental_schedulers = ( + incremental_properties is not None and devops_env_id in ["PROD"] + ) + needs_nightly_schedulers = nightly_properties is not None and devops_env_id in [ + "PROD" + ] track_ids = tracked_branch_ids.keys() print(f"*** Creating [{artifact_id}] pipeline") @@ -297,7 +302,9 @@ def create_pipeline( ] if needs_incremental_schedulers and (track_id in track_ids): - incremental_scheduler_name = f"{track_id}-{artifact_id}-coordinator-incremental" + incremental_scheduler_name = ( + f"{track_id}-{artifact_id}-coordinator-incremental" + ) change_filter = buildbot.plugins.util.ChangeFilter( project=[codebase], branch=tracked_branch_ids[track_id] ) diff --git a/config/pipeline/doc_manual.py b/config/pipeline/doc_manual.py index 4cdd619..9b846c4 100644 --- a/config/pipeline/doc_manual.py +++ b/config/pipeline/doc_manual.py @@ -37,7 +37,10 @@ def populate(devops_env_id): ["linux-x86_64-general", "linux-x86_64-general"], variation_property="doc_format", variations=["html", "epub"], - incremental_properties={"needs_package_delivery": True, "needs_all_locales": False}, + incremental_properties={ + "needs_package_delivery": True, + "needs_all_locales": False, + }, nightly_properties={"needs_package_delivery": True, "needs_all_locales": True}, tree_stable_timer_in_seconds=15 * 60, do_step_if=pipeline.common.needs_do_doc_pipeline_step, diff --git a/config/worker/archive.py b/config/worker/archive.py index 6cfcaed..fc4f300 100755 --- a/config/worker/archive.py +++ b/config/worker/archive.py @@ -52,7 +52,8 @@ class ArchiveBuilder(worker.utils.Builder): def 
file_age_in_days(file_path: pathlib.Path) -> float: try: file_path_mtime = os.path.getmtime(file_path) - except: + except (FileNotFoundError, PermissionError) as e: + print(f"Error accessing file: {e}") return 0.0 age_in_seconds = time.time() - file_path_mtime @@ -60,7 +61,6 @@ def file_age_in_days(file_path: pathlib.Path) -> float: def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]: - file_name = file_path.name matches = re.match(package_file_pattern, file_path.name) if not matches: return None @@ -92,8 +92,6 @@ def fetch_current_builds( short_version: Optional[str] = None, all_platforms: bool = False, ) -> Dict[Any, List[Any]]: - app_id = "bpy" if builder.python_module else "blender" - worker_config = builder.get_worker_config() download_path = worker_config.buildbot_download_folder pipeline_build_path = download_path / pipeline_type @@ -109,11 +107,16 @@ def fetch_current_builds( build_info = parse_build_info(file_path) if not build_info: continue - if short_version and not build_info["version_id"].startswith(short_version + "."): + if short_version and not build_info["version_id"].startswith( + short_version + "." + ): continue if not all_platforms: - if builder.architecture and build_info["architecture"] != builder.architecture: + if ( + builder.architecture + and build_info["architecture"] != builder.architecture + ): continue if builder.platform_id and build_info["platform_id"] != builder.platform_id: continue @@ -174,9 +177,13 @@ def deduplicate(builder: ArchiveBuilder) -> None: short_version = branches_config.track_major_minor_versions[builder.track_id] if not short_version: - raise BaseException(f"Missing version in [{builder.pipeline_type}] builds, aborting") + raise BaseException( + f"Missing version in [{builder.pipeline_type}] builds, aborting" + ) - build_groups = fetch_current_builds(builder, builder.pipeline_type, short_version=short_version) + build_groups = fetch_current_builds( + builder, builder.pipeline_type, short_version=short_version + ) print( f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] [{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]" @@ -217,14 +224,20 @@ def purge(builder: ArchiveBuilder) -> None: if pipeline_type != "daily": print("=" * 120) print(f"Deduplicating [{pipeline_type}] builds") - build_groups = fetch_current_builds(builder, pipeline_type, all_platforms=True) + build_groups = fetch_current_builds( + builder, pipeline_type, all_platforms=True + ) for key, build_group in build_groups.items(): print("") print("--- Group: " + str(key)) - archive_build_group(build_group, builds_retention_in_days, dry_run=dry_run) + archive_build_group( + build_group, builds_retention_in_days, dry_run=dry_run + ) print("=" * 120) - print(f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days") + print( + f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days" + ) for file_path in fetch_purge_builds(builder, pipeline_type, "archive"): if file_age_in_days(file_path) < builds_retention_in_days: continue @@ -237,7 +250,9 @@ def purge(builder: ArchiveBuilder) -> None: worker.utils.remove_file(checksum_file_path, dry_run=dry_run) print("=" * 120) - print(f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days") + print( + f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days" + ) for file_path in fetch_purge_builds(builder, pipeline_type, "tests"): if file_age_in_days(file_path) < tests_retention_in_days: 
continue @@ -256,7 +271,7 @@ def generate_test_data(builder: ArchiveBuilder) -> None: branches_config = builder.get_branches_config() short_version = branches_config.track_major_minor_versions[builder.track_id] - version = short_version + ".0" + short_version + ".0" app_id = "bpy" if builder.python_module else "blender" commit_hashes = ["1ddf858", "03a2a53"] @@ -319,9 +334,15 @@ if __name__ == "__main__": parser = worker.utils.create_argument_parser(steps=steps) parser.add_argument( - "--pipeline-type", default="daily", type=str, choices=pipeline_types, required=False + "--pipeline-type", + default="daily", + type=str, + choices=pipeline_types, + required=False, + ) + parser.add_argument( + "--platform-id", default="", type=str, choices=platforms, required=False ) - parser.add_argument("--platform-id", default="", type=str, choices=platforms, required=False) parser.add_argument( "--architecture", default="", type=str, choices=architectures, required=False ) diff --git a/config/worker/blender/__init__.py b/config/worker/blender/__init__.py index 8b31b65..c2fadff 100644 --- a/config/worker/blender/__init__.py +++ b/config/worker/blender/__init__.py @@ -9,9 +9,6 @@ import pathlib import re import subprocess -from collections import OrderedDict -from typing import Callable, Any - import worker.utils @@ -32,7 +29,9 @@ class CodeBuilder(worker.utils.Builder): self.architecture = args.architecture if self.platform == "darwin": - self.build_dir = track_path / f"build_{self.architecture}_{self.build_configuration}" + self.build_dir = ( + track_path / f"build_{self.architecture}_{self.build_configuration}" + ) else: self.build_dir = track_path / f"build_{self.build_configuration}" @@ -47,7 +46,9 @@ class CodeBuilder(worker.utils.Builder): worker.utils.remove_dir(self.build_doc_path) # Call command with in compiler environment. - def call(self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None) -> int: + def call( + self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None + ) -> int: cmd_prefix: worker.utils.CmdList = [] if self.platform == "darwin": @@ -57,11 +58,16 @@ class CodeBuilder(worker.utils.Builder): xcode_version = xcode.get("version", None) if xcode else None if xcode_version: - developer_dir = f"/Applications/Xcode-{xcode_version}.app/Contents/Developer" + developer_dir = ( + f"/Applications/Xcode-{xcode_version}.app/Contents/Developer" + ) else: developer_dir = "/Applications/Xcode.app/Contents/Developer" - if self.service_env_id == "LOCAL" and not pathlib.Path(developer_dir).exists(): + if ( + self.service_env_id == "LOCAL" + and not pathlib.Path(developer_dir).exists() + ): worker.utils.warning( f"Skip using non-existent {developer_dir} in LOCAL service environment" ) @@ -84,7 +90,9 @@ class CodeBuilder(worker.utils.Builder): return worker.utils.call(cmd_prefix + list(cmd), env=env) def pipeline_config(self) -> dict: - config_file_path = self.code_path / "build_files" / "config" / "pipeline_config.json" + config_file_path = ( + self.code_path / "build_files" / "config" / "pipeline_config.json" + ) if not config_file_path.exists(): config_file_path = config_file_path.with_suffix(".yaml") if not config_file_path.exists(): @@ -116,7 +124,9 @@ class CodeBuilder(worker.utils.Builder): # CMake goes first to avoid using chocolaty cpack command. 
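# The Windows environment setup below follows a common vcvars capture
# pattern; a minimal standalone sketch of it, assuming a Windows host
# (the batch-file path is illustrative):
import os
import subprocess

def import_vcvars_environment(vcvars_bat_path: str) -> None:
    # Run the batch file, then dump the resulting environment via `set`
    # and merge every VAR=value line back into this process, mirroring
    # the vcvars_output parsing later in this file's hunks.
    output = subprocess.check_output([vcvars_bat_path, "&&", "set"], shell=True)
    for line in output.decode("utf-8", "ignore").splitlines():
        if "=" in line:
            key, _, value = line.partition("=")
            os.environ[key] = value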
worker.utils.info("Setting CMake path") - os.environ["PATH"] = "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"] + os.environ["PATH"] = ( + "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"] + ) worker.utils.info("Setting VC Tools env variables") windows_build_version = "10.0.19041.0" @@ -126,7 +136,9 @@ class CodeBuilder(worker.utils.Builder): + os.environ["PATH"] ) os.environ["PATH"] = ( - "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + os.pathsep + os.environ["PATH"] + "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" + + os.pathsep + + os.environ["PATH"] ) if self.architecture == "arm64": @@ -140,7 +152,9 @@ class CodeBuilder(worker.utils.Builder): ) vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64" - vcvars_output = subprocess.check_output([vs_build_tool_path, "&&", "set"], shell=True) + vcvars_output = subprocess.check_output( + [vs_build_tool_path, "&&", "set"], shell=True + ) vcvars_text = vcvars_output.decode("utf-8", "ignore") for line in vcvars_text.splitlines(): diff --git a/config/worker/blender/benchmark.py b/config/worker/blender/benchmark.py index 5280e9b..32d0863 100644 --- a/config/worker/blender/benchmark.py +++ b/config/worker/blender/benchmark.py @@ -14,7 +14,6 @@ import worker.blender import worker.utils - def create_upload( builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str ) -> None: diff --git a/config/worker/blender/bundle_dmg.py b/config/worker/blender/bundle_dmg.py index cee3a33..12c1338 100644 --- a/config/worker/blender/bundle_dmg.py +++ b/config/worker/blender/bundle_dmg.py @@ -95,7 +95,9 @@ def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int: return app_bundles_size + _extra_dmg_size_in_bytes -def copy_app_bundles(app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path) -> None: +def copy_app_bundles( + app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path +) -> None: """ Copy all bundles to a given directory @@ -122,7 +124,9 @@ def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path: def create_dmg_image( - app_bundles: typing.List[pathlib.Path], dmg_file_path: pathlib.Path, volume_name: str + app_bundles: typing.List[pathlib.Path], + dmg_file_path: pathlib.Path, + volume_name: str, ) -> None: """ Create DMG disk image and put app bundles in it @@ -134,7 +138,9 @@ def create_dmg_image( worker.utils.remove_file(dmg_file_path) temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-") - worker.utils.info(f"Preparing directory with app bundles for the DMG [{temp_content_path}]") + worker.utils.info( + f"Preparing directory with app bundles for the DMG [{temp_content_path}]" + ) with temp_content_path as content_dir_str: # Copy all bundles to a clean directory. content_dir_path = pathlib.Path(content_dir_str) @@ -236,13 +242,17 @@ def eject_volume(volume_name: str) -> None: if tokens[1] != "on": continue if device: - raise Exception(f"Multiple devices found for mounting point [{mount_directory}]") + raise Exception( + f"Multiple devices found for mounting point [{mount_directory}]" + ) device = tokens[0] if not device: raise Exception(f"No device found for mounting point [{mount_directory}]") - worker.utils.info(f"[{mount_directory}] is mounted as device [{device}], ejecting...") + worker.utils.info( + f"[{mount_directory}] is mounted as device [{device}], ejecting..." 
+ ) command = ["diskutil", "eject", device] worker.utils.call(command) @@ -297,7 +307,9 @@ def run_applescript_file_path( needs_run_applescript = True if not needs_run_applescript: - worker.utils.info(f"Having issues with apple script on [{architecture}], skipping !") + worker.utils.info( + f"Having issues with apple script on [{architecture}], skipping !" + ) return temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript") @@ -316,8 +328,12 @@ def run_applescript_file_path( if not background_image_file_path: continue else: - background_image_short = f".background:{background_image_file_path.name}" - line = re.sub('to file ".*"', f'to file "{background_image_short}"', line) + background_image_short = ( + f".background:{background_image_file_path.name}" + ) + line = re.sub( + 'to file ".*"', f'to file "{background_image_short}"', line + ) line = line.replace("blender.app", main_app_bundle.name) stripped_line = line.rstrip("\r\n") worker.utils.info(f"line={stripped_line}") @@ -343,7 +359,9 @@ def run_applescript_file_path( time.sleep(5) -def compress_dmg(writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path) -> None: +def compress_dmg( + writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path +) -> None: """ Compress temporary read-write DMG """ @@ -469,5 +487,9 @@ def bundle( worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]") create_final_dmg( - app_bundles, dmg_file_path, background_image_file_path, volume_name, applescript_file_path + app_bundles, + dmg_file_path, + background_image_file_path, + volume_name, + applescript_file_path, ) diff --git a/config/worker/blender/compile.py b/config/worker/blender/compile.py index 07ff990..faba4bf 100644 --- a/config/worker/blender/compile.py +++ b/config/worker/blender/compile.py @@ -41,15 +41,21 @@ def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int: worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}") estimate_memory_per_code_in_bytes = estimate_core_memory_in_mb * 1024 * 1024 - worker.utils.info(f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}") + worker.utils.info( + f"estimate_memory_per_code_in_bytes={estimate_memory_per_code_in_bytes}" + ) - capable_cpu_count = int(total_memory_in_bytes / estimate_memory_per_code_in_bytes) + capable_cpu_count = int( + total_memory_in_bytes / estimate_memory_per_code_in_bytes + ) worker.utils.info(f"capable_cpu_count={capable_cpu_count}") min_cpu_count = min(total_cpu_count, capable_cpu_count) worker.utils.info(f"min_cpu_count={min_cpu_count}") - ideal_cpu_count = min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count + ideal_cpu_count = ( + min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count + ) worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}") return ideal_cpu_count @@ -88,9 +94,13 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake" if platform_config_file_path: - worker.utils.info(f'Trying platform-specific buildbot configuration "{platform_config_file_path}"') + worker.utils.info( + f'Trying platform-specific buildbot configuration "{platform_config_file_path}"' + ) if (Path(builder.blender_dir) / platform_config_file_path).exists(): - worker.utils.info(f'Using platform-specific buildbot configuration "{platform_config_file_path}"') + worker.utils.info( + f'Using platform-specific buildbot configuration 
"{platform_config_file_path}"' + ) config_file_path = platform_config_file_path else: worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"') @@ -145,13 +155,17 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir) if builder.architecture == "arm64": - compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe" - compiler_file_path="C:/Program Files/LLVM/bin/clang-cl.exe" - linker_file_path="C:/Program Files/LLVM/bin/lld-link.exe" + compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe" + compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe" + linker_file_path = "C:/Program Files/LLVM/bin/lld-link.exe" else: vs_tool_install_dir_suffix = "bin/Hostx64/x64" - compiler_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe") - linker_file_path = str(vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe") + compiler_file_path = str( + vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe" + ) + linker_file_path = str( + vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe" + ) options += ["-G", "Ninja"] # -DWITH_WINDOWS_SCCACHE=On @@ -194,7 +208,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe for cmake_key in cmake_overrides.keys(): for restricted_key_pattern in restricted_key_patterns: if restricted_key_pattern in cmake_key: - raise Exception(f"CMake key [{cmake_key}] cannot be overriden, aborting") + raise Exception( + f"CMake key [{cmake_key}] cannot be overriden, aborting" + ) for cmake_key, cmake_value in cmake_overrides.items(): options += [f"-D{cmake_key}={cmake_value}"] @@ -238,7 +254,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe ocloc_version = "dev_01" options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"] options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"] - options += [f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}"] + options += [ + f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}" + ] elif builder.platform == "linux": # CUDA on Linux options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"] @@ -300,22 +318,20 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe options += [ f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}" ] - # Enable option to verify enabled libraries and features did not get disabled. 
options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"] - needs_cuda_compile = builder.needs_gpu_binaries if builder.needs_gpu_binaries: try: cuda10_version = buildbotConfig["cuda10"]["version"] - except: + except KeyError: cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"] cuda10_folder_version = ".".join(cuda10_version.split(".")[:2]) try: cuda11_version = buildbotConfig["cuda11"]["version"] - except: + except KeyError: cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"] cuda11_folder_version = ".".join(cuda11_version.split(".")[:2]) @@ -324,7 +340,7 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe cuda12_version = buildbotConfig["cuda12"]["version"] cuda12_folder_version = ".".join(cuda12_version.split(".")[:2]) have_cuda12 = True - except: + except KeyError: have_cuda12 = False if builder.platform == "windows" and builder.architecture != "arm64": @@ -408,7 +424,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe else: # Use new CMake option. options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"] - options += ["-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc"] + options += [ + "-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc" + ] # CUDA 11 or 12. if have_cuda12: @@ -428,7 +446,9 @@ def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSe def clean_directories(builder: worker.blender.CodeBuilder) -> None: - worker.utils.info(f"Cleaning directory [{builder.install_dir})] from the previous run") + worker.utils.info( + f"Cleaning directory [{builder.install_dir})] from the previous run" + ) worker.utils.remove_dir(builder.install_dir) os.makedirs(builder.build_dir, exist_ok=True) @@ -452,7 +472,9 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None: worker.utils.info("CMake configure options") cmake_options = get_cmake_options(builder) - cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list(cmake_options) + cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list( + cmake_options + ) builder.call(cmd) # This hack does not work as expected, since cmake cache is the always updated, we end up recompiling on each compile step, code, gpu and install @@ -465,7 +487,10 @@ def cmake_configure(builder: worker.blender.CodeBuilder) -> None: fout = open(tmp_cmake_cache_file_path, "wt") for line in fin: # worker.utils.info(line) - if "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" in line: + if ( + "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" + in line + ): worker.utils.warning( "Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] to [/usr/lib64/libpthread.a]" ) @@ -489,7 +514,9 @@ def cmake_build(builder: worker.blender.CodeBuilder, do_install: bool) -> None: else: estimate_gpu_memory_in_mb = 6000 - estimate_core_memory_in_mb = estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000 + estimate_core_memory_in_mb = ( + estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000 + ) ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb) # Enable verbose building to make ninja to output more often. 
diff --git a/config/worker/blender/lint.py b/config/worker/blender/lint.py index 5c0afcd..c34c8e7 100644 --- a/config/worker/blender/lint.py +++ b/config/worker/blender/lint.py @@ -21,7 +21,7 @@ def make_format(builder: worker.blender.CodeBuilder) -> bool: # Run format if builder.platform == "windows": builder.call(["make.bat", "format"]) - else: + else: builder.call(["make", "-f", "GNUmakefile", "format"]) # Check for changes diff --git a/config/worker/blender/pack.py b/config/worker/blender/pack.py index a39a6d8..e74bd95 100644 --- a/config/worker/blender/pack.py +++ b/config/worker/blender/pack.py @@ -82,7 +82,10 @@ def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> N for root, dirs, files in os.walk(src): package_root = os.path.join(package_name, root[ln:]) flist.extend( - [(os.path.join(root, file), os.path.join(package_root, file)) for file in files] + [ + (os.path.join(root, file), os.path.join(package_root, file)) + for file in files + ] ) # Set UID/GID of archived files to 0, otherwise they'd be owned by whatever @@ -112,7 +115,7 @@ def cleanup_files(dirpath: pathlib.Path, extension: str) -> None: def pack_mac(builder: worker.blender.CodeBuilder) -> None: - version_info = worker.blender.version.VersionInfo(builder) + worker.blender.version.VersionInfo(builder) os.chdir(builder.build_dir) cleanup_files(builder.package_dir, ".dmg") @@ -121,15 +124,24 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None: package_file_name = package_name + ".dmg" package_file_path = builder.package_dir / package_file_name - applescript_file_path = pathlib.Path(__file__).parent.resolve() / "blender.applescript" - background_image_file_path = builder.blender_dir / "release" / "darwin" / "background.tif" + applescript_file_path = ( + pathlib.Path(__file__).parent.resolve() / "blender.applescript" + ) + background_image_file_path = ( + builder.blender_dir / "release" / "darwin" / "background.tif" + ) worker.blender.bundle_dmg.bundle( - builder.install_dir, package_file_path, applescript_file_path, background_image_file_path + builder.install_dir, + package_file_path, + applescript_file_path, + background_image_file_path, ) # Sign - worker.blender.sign.sign_darwin_files(builder, [package_file_path], "entitlements.plist") + worker.blender.sign.sign_darwin_files( + builder, [package_file_path], "entitlements.plist" + ) # Notarize worker_config = builder.get_worker_config() @@ -169,7 +181,14 @@ def pack_mac(builder: worker.blender.CodeBuilder) -> None: # Show logs worker.utils.call( - ["xcrun", "notarytool", "log", "--keychain-profile", keychain_profile, request_id], + [ + "xcrun", + "notarytool", + "log", + "--keychain-profile", + keychain_profile, + request_id, + ], retry_count=5, retry_wait_time=10.0, ) @@ -262,14 +281,17 @@ def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None: / "ZIP" / f"{final_package_file_name}" ) - worker.utils.info(f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]") + worker.utils.info( + f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]" + ) os.rename(source_cpack_file_path, final_package_file_path) else: os.rename(bogus_cpack_file_path, final_package_file_path) version_info = worker.blender.version.VersionInfo(builder) description = f"Blender {version_info.version}" - worker.blender.sign.sign_windows_files(builder.service_env_id, [final_package_file_path], - description=description) + worker.blender.sign.sign_windows_files( + builder.service_env_id, [final_package_file_path], 
description=description + ) generate_file_hash(final_package_file_path) @@ -289,9 +311,13 @@ def pack_linux(builder: worker.blender.CodeBuilder) -> None: py_target = builder.install_dir / version_info.short_version if not os.path.exists(py_target): # Support older format and current issue with 3.00 - py_target = builder.install_dir / ("%d.%02d" % (version_info.major, version_info.minor)) + py_target = builder.install_dir / ( + "%d.%02d" % (version_info.major, version_info.minor) + ) - worker.utils.call(["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"]) + worker.utils.call( + ["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"] + ) package_name = get_package_name(builder) package_file_name = f"{package_name}.tar.xz" diff --git a/config/worker/blender/sign.py b/config/worker/blender/sign.py index 9746c00..7933b76 100644 --- a/config/worker/blender/sign.py +++ b/config/worker/blender/sign.py @@ -22,7 +22,7 @@ def sign_windows_files( worker_config = conf.worker.get_config(service_env_id) # TODO: Rotate them if first 1 fails - timeserver = worker_config.sign_code_windows_time_servers[0] + worker_config.sign_code_windows_time_servers[0] server_url = worker_config.sign_code_windows_server_url if not certificate_id: certificate_id = worker_config.sign_code_windows_certificate @@ -50,7 +50,9 @@ def sign_windows_files( for i in range(0, len(file_paths), chunk_size): file_chunks = file_paths[i : i + chunk_size] - worker.utils.call(list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run) + worker.utils.call( + list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run + ) def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None: @@ -97,9 +99,11 @@ def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None: def sign_darwin_files( builder: worker.blender.CodeBuilder, file_paths: Sequence[pathlib.Path], - entitlements_file_name: str + entitlements_file_name: str, ) -> None: - entitlements_path = builder.code_path / "release" / "darwin" / entitlements_file_name + entitlements_path = ( + builder.code_path / "release" / "darwin" / entitlements_file_name + ) if not entitlements_path.exists(): raise Exception(f"File {entitlements_path} not found, aborting") @@ -128,7 +132,9 @@ def sign_darwin_files( # Remove signature if file_path.suffix != ".dmg": worker.utils.call( - ["codesign", "--remove-signature", file_path], exit_on_error=False, dry_run=dry_run + ["codesign", "--remove-signature", file_path], + exit_on_error=False, + dry_run=dry_run, ) # Add signature @@ -163,11 +169,15 @@ def sign_darwin(builder: worker.blender.CodeBuilder) -> None: sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist") # Thumbnailer app extension. 
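# sign_windows_files above submits files to the signing service in slices of
# chunk_size to keep command lines short; a generic sketch of that batching
# rule (the default size here is illustrative, not the config's value):
from typing import List, Sequence

def batch(items: Sequence, chunk_size: int = 25) -> List[Sequence]:
    # Fixed-size slices; the final slice may be shorter.
    return [items[i : i + chunk_size] for i in range(0, len(items), chunk_size)]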
- thumbnailer_appex_path = bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex" + thumbnailer_appex_path = ( + bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex" + ) if thumbnailer_appex_path.exists(): sign_path = thumbnailer_appex_path / "Contents" / "MacOS" worker.utils.info(f"Collecting files to process in {sign_path}") - sign_darwin_files(builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist") + sign_darwin_files( + builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist" + ) # Shared librarys and Python sign_path = bundle_path / "Contents" / "Resources" diff --git a/config/worker/blender/test.py b/config/worker/blender/test.py index 569f909..1fa8990 100644 --- a/config/worker/blender/test.py +++ b/config/worker/blender/test.py @@ -36,7 +36,9 @@ def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> No package_filename = "tests-" + worker.blender.pack.get_package_name(builder) package_filepath = package_tests_dir / package_filename shutil.copytree(build_tests_dir, package_filepath) - shutil.make_archive(str(package_filepath), "zip", package_tests_dir, package_filename) + shutil.make_archive( + str(package_filepath), "zip", package_tests_dir, package_filename + ) shutil.rmtree(package_filepath) # Always upload unpacked folder for main and release tracks, diff --git a/config/worker/blender/update.py b/config/worker/blender/update.py index cb5909d..ae7ab93 100644 --- a/config/worker/blender/update.py +++ b/config/worker/blender/update.py @@ -32,8 +32,12 @@ def update(builder: worker.blender.CodeBuilder) -> None: make_update_text = make_update_path.read_text() if "def svn_update" in make_update_text: - worker.utils.error("Can't build branch or pull request that uses Subversion libraries.") - worker.utils.error("Merge with latest main or release branch to use Git LFS libraries.") + worker.utils.error( + "Can't build branch or pull request that uses Subversion libraries." + ) + worker.utils.error( + "Merge with latest main or release branch to use Git LFS libraries." 
+ ) sys.exit(1) # Run make update diff --git a/config/worker/blender/version.py b/config/worker/blender/version.py index 24e7fca..c42bb61 100644 --- a/config/worker/blender/version.py +++ b/config/worker/blender/version.py @@ -14,12 +14,18 @@ class VersionInfo: # Get version information buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h" blender_h = ( - builder.blender_dir / "source" / "blender" / "blenkernel" / "BKE_blender_version.h" + builder.blender_dir + / "source" + / "blender" + / "blenkernel" + / "BKE_blender_version.h" ) version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION")) - version_number_patch = int(self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH")) + version_number_patch = int( + self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH") + ) self.major, self.minor, self.patch = ( version_number // 100, version_number % 100, @@ -38,14 +44,16 @@ class VersionInfo: self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1] else: self.hash = "" - self.risk_id = self.version_cycle.replace("release", "stable").replace("rc", "candidate") + self.risk_id = self.version_cycle.replace("release", "stable").replace( + "rc", "candidate" + ) self.is_development_build = self.version_cycle == "alpha" def _parse_header_file(self, filename: pathlib.Path, define: str) -> str: regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define) with open(filename, "r") as file: - for l in file: - match = regex.match(l) + for line in file: + match = regex.match(line) if match: return match.group(1) diff --git a/config/worker/code_store.py b/config/worker/code_store.py index 0ad5736..f460b30 100755 --- a/config/worker/code_store.py +++ b/config/worker/code_store.py @@ -52,7 +52,9 @@ if __name__ == "__main__": steps["clean"] = worker.deploy.CodeDeployBuilder.clean parser = worker.blender.create_argument_parser(steps=steps) - parser.add_argument("--store-id", type=str, choices=["snap", "steam", "windows"], required=True) + parser.add_argument( + "--store-id", type=str, choices=["snap", "steam", "windows"], required=True + ) args = parser.parse_args() builder = worker.deploy.CodeStoreBuilder(args) diff --git a/config/worker/configure.py b/config/worker/configure.py index 4476e6c..be08382 100644 --- a/config/worker/configure.py +++ b/config/worker/configure.py @@ -22,7 +22,9 @@ def get_os_release() -> str: def get_cpu_info() -> str: if platform.system() == "Darwin": - return worker.utils.check_output(["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"]) + return worker.utils.check_output( + ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"] + ) elif platform.system() == "Linux": cpuinfo = pathlib.Path("/proc/cpuinfo").read_text() for line in cpuinfo.splitlines(): @@ -101,9 +103,9 @@ def clean(builder: worker.utils.Builder) -> None: sorted_paths: List[Tuple[float, pathlib.Path]] = [] for delete_path in optional_delete_paths: try: - sorted_paths += [(os.path.getmtime(delete_path), delete_path)] - except: - pass + sorted_paths.append((os.path.getmtime(delete_path), delete_path)) + except (FileNotFoundError, PermissionError) as e: + worker.utils.warning(f"Unable to access {delete_path}: {e}") for _, delete_path in sorted(sorted_paths): worker.utils.remove_dir(delete_path) @@ -128,7 +130,9 @@ def configure_machine(builder: worker.utils.Builder) -> None: print(f"Release: {get_os_release()}") print(f"Version: {platform.version()}") print(f"Processor: {processor}") - print(f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical") + 
print( + f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical" + ) print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB") print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB") @@ -194,6 +198,11 @@ def configure_machine(builder: worker.utils.Builder) -> None: proc.kill() for proc in psutil.process_iter(): - if proc.name().lower() in ["blender", "blender.exe", "blender_test", "blender_test.exe"]: + if proc.name().lower() in [ + "blender", + "blender.exe", + "blender_test", + "blender_test.exe", + ]: worker.utils.warning("Killing stray Blender process") proc.kill() diff --git a/config/worker/deploy/artifacts.py b/config/worker/deploy/artifacts.py index 240e08b..c008fd5 100644 --- a/config/worker/deploy/artifacts.py +++ b/config/worker/deploy/artifacts.py @@ -19,10 +19,6 @@ checksums = ["md5", "sha256"] def pull(builder: worker.deploy.CodeDeployBuilder) -> None: - retry_count = 0 - retry_delay_in_seconds = 30 - timeout_in_seconds = 60 - pipeline_category = "daily" if builder.track_id == "vexp": pipeline_category = "experimental" @@ -75,7 +71,11 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None: # Prefer more stable builds, to avoid issue when multiple are present. risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"] risk = build["risk_id"] - risk = risk_id_order.index(risk) if risk in risk_id_order else len(risk_id_order) + risk = ( + risk_id_order.index(risk) + if risk in risk_id_order + else len(risk_id_order) + ) other_risk = unique_builds[key]["risk_id"] other_risk = ( risk_id_order.index(other_risk) @@ -92,7 +92,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None: builds = list(unique_builds.values()) if len(builds) == 0: - raise Exception(f"No builds found for version [{version_info.version}] in [{search_url}]") + raise Exception( + f"No builds found for version [{version_info.version}] in [{search_url}]" + ) # Download builds. 
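# The deduplication above prefers the most stable build when several share a
# key; a compact sketch of that ranking, with the order list copied from the
# hunk above:
risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"]

def risk_rank(risk_id: str) -> int:
    # Unknown risk ids sort last, i.e. are least preferred.
    return risk_id_order.index(risk_id) if risk_id in risk_id_order else len(risk_id_order)

def prefer_stable(a: dict, b: dict) -> dict:
    # Keep whichever build carries the lower (more stable) rank.
    return a if risk_rank(a["risk_id"]) <= risk_rank(b["risk_id"]) else b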
worker.utils.remove_dir(builder.download_dir) @@ -113,7 +115,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None: # Moving to build_package folder worker.utils.info(f"Move to [{builder.package_dir}]") - worker.utils.move(download_file_path, builder.package_dir / download_file_path.name) + worker.utils.move( + download_file_path, builder.package_dir / download_file_path.name + ) worker.utils.remove_dir(builder.download_dir) @@ -164,7 +168,9 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None: if file_extension == "zip": worker.utils.info(f"Renaming internal folder to [{new_folder_name}]") - worker.utils.call(["7z", "rn", dest_file_path, current_folder_name, new_folder_name]) + worker.utils.call( + ["7z", "rn", dest_file_path, current_folder_name, new_folder_name] + ) elif file_extension == "tar.xz": worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]") worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."]) @@ -198,11 +204,15 @@ def repackage(builder: worker.deploy.CodeDeployBuilder) -> None: for checksum in checksums: checksum_text = "" for filepath in checksum_file_paths: - checksum_line = worker.utils.check_output([f"{checksum}sum", filepath.name]).strip() + checksum_line = worker.utils.check_output( + [f"{checksum}sum", filepath.name] + ).strip() checksum_text += checksum_line + "\n" print(checksum_text) - checksum_filepath = deployable_path / f"blender-{version_info.version}.{checksum}" + checksum_filepath = ( + deployable_path / f"blender-{version_info.version}.{checksum}" + ) checksum_filepath.write_text(checksum_text) @@ -218,34 +228,53 @@ def deploy(builder: worker.deploy.CodeDeployBuilder) -> None: if builder.service_env_id != "PROD": # Already assumed to exist on production - worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) + worker.utils.call_ssh( + connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run + ) for source_path in builder.package_source_dir.iterdir(): dest_path = f"{connect_id}:{remote_dest_path}/" worker.utils.info(f"Deploying source package [{source_path}]") worker.utils.rsync( - source_path, dest_path, change_modes=change_modes, show_names=True, dry_run=dry_run + source_path, + dest_path, + change_modes=change_modes, + show_names=True, + dry_run=dry_run, ) - worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) + worker.utils.call_ssh( + connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run + ) # Copy binaries version_info = worker.blender.version.VersionInfo(builder) major_minor_version = version_info.short_version remote_dest_path = ( - pathlib.Path(worker_config.download_release_folder) / f"Blender{major_minor_version}" + pathlib.Path(worker_config.download_release_folder) + / f"Blender{major_minor_version}" ) deployable_path = builder.package_dir / "deployable" change_modes = ["F0444"] - worker.utils.call_ssh(connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run) - worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) + worker.utils.call_ssh( + connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run + ) + worker.utils.call_ssh( + connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run + ) for source_path in deployable_path.iterdir(): dest_path = f"{connect_id}:{remote_dest_path}/" worker.utils.info(f"Deploying binary package [{source_path}]") worker.utils.rsync( - source_path, dest_path, change_modes=change_modes, show_names=True, 
dry_run=dry_run + source_path, + dest_path, + change_modes=change_modes, + show_names=True, + dry_run=dry_run, ) - worker.utils.call_ssh(connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run) + worker.utils.call_ssh( + connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run + ) diff --git a/config/worker/deploy/monitor.py b/config/worker/deploy/monitor.py index 206cb63..56a0fef 100644 --- a/config/worker/deploy/monitor.py +++ b/config/worker/deploy/monitor.py @@ -37,7 +37,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: stop_on_required_site_found = False branches_config = builder.get_branches_config() - expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] + expected_platforms = branches_config.code_official_platform_architectures[ + builder.track_id + ] expected_file_count = len(worker.deploy.artifacts.checksums) for expected_platform in expected_platforms: @@ -61,7 +63,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: print(f"Checking [{search_url}] for version [{version_info.version}]") # Header to avoid getting permission denied. - request = urllib.request.Request(search_url, headers={"User-Agent": "Mozilla"}) + request = urllib.request.Request( + search_url, headers={"User-Agent": "Mozilla"} + ) try: response = urllib.request.urlopen(request, timeout=5.0) @@ -71,7 +75,7 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: text = "" matches = set(re.findall(file_pattern, text)) - found_file_count = len(matches) + len(matches) for match in matches: print(f"File [{match}]") @@ -93,7 +97,9 @@ def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: print("") print("=" * 80) - print(f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files") + print( + f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files" + ) print("=" * 80) if found_site_count == len(monitored_base_urls): diff --git a/config/worker/deploy/pypi.py b/config/worker/deploy/pypi.py index 51b8ae1..8d54913 100644 --- a/config/worker/deploy/pypi.py +++ b/config/worker/deploy/pypi.py @@ -54,7 +54,9 @@ def pull(builder: worker.deploy.CodeDeployBuilder) -> None: # Check expected platforms branches_config = builder.get_branches_config() - expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] + expected_platforms = branches_config.code_official_platform_architectures[ + builder.track_id + ] if len(expected_platforms) != len(matching_builds): platform_names = "\n".join(expected_platforms) raise Exception("Unexpected number of builds, expected:\n" + platform_names) @@ -81,7 +83,9 @@ def deliver(builder: worker.deploy.CodeDeployBuilder) -> None: # Check expected platforms branches_config = builder.get_branches_config() - expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] + expected_platforms = branches_config.code_official_platform_architectures[ + builder.track_id + ] wheel_names = "\n".join([wheel.name for wheel in wheels]) wheel_paths = [str(wheel) for wheel in wheels] print(wheel_names) diff --git a/config/worker/deploy/snap.py b/config/worker/deploy/snap.py index cb06cb8..8f950cf 100644 --- a/config/worker/deploy/snap.py +++ b/config/worker/deploy/snap.py @@ -51,14 +51,20 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None: snap_source_root_path = freedesktop_path / "snap" blender_icon_file_name = "blender.svg" - snapcraft_template_file_path = snap_source_root_path / 
"blender-snapcraft-template.yaml" + snapcraft_template_file_path = ( + snap_source_root_path / "blender-snapcraft-template.yaml" + ) worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]") snapcraft_text = snapcraft_template_file_path.read_text() snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version) snapcraft_text = snapcraft_text.replace("@GRADE@", grade) - snapcraft_text = snapcraft_text.replace("@ICON_PATH@", f"./{blender_icon_file_name}") - snapcraft_text = snapcraft_text.replace("@PACKAGE_PATH@", f"./{linux_package_file_path.name}") + snapcraft_text = snapcraft_text.replace( + "@ICON_PATH@", f"./{blender_icon_file_name}" + ) + snapcraft_text = snapcraft_text.replace( + "@PACKAGE_PATH@", f"./{linux_package_file_path.name}" + ) snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml" worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]") @@ -74,7 +80,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None: os.chdir(builder.store_snap_dir) # Copy all required files into working folder - source_file_path = freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name + source_file_path = ( + freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name + ) dest_file_path = builder.store_snap_dir / "blender.svg" worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") worker.utils.copy_file(source_file_path, dest_file_path) @@ -87,7 +95,8 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None: worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run) worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run) worker.utils.call( - ["review-tools.snap-review", snap_package_file_path, "--allow-classic"], dry_run=dry_run + ["review-tools.snap-review", snap_package_file_path, "--allow-classic"], + dry_run=dry_run, ) if dry_run: @@ -110,11 +119,14 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: branches_config = builder.get_branches_config() is_lts = builder.track_id in branches_config.all_lts_tracks is_latest = ( - branches_config.track_major_minor_versions[builder.track_id] == version_info.short_version + branches_config.track_major_minor_versions[builder.track_id] + == version_info.short_version ) # Never push to stable - snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace("alpha", "edge") + snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace( + "alpha", "edge" + ) if snap_risk_id == "stable": raise Exception("Delivery to [stable] channel not allowed") @@ -139,7 +151,9 @@ def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: worker_config = builder.get_worker_config() env = os.environ.copy() - env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials(builder.service_env_id) + env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials( + builder.service_env_id + ) # If this fails, then the permissions were not set correcty with acls worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env) diff --git a/config/worker/deploy/source.py b/config/worker/deploy/source.py index cd58069..61edeb1 100644 --- a/config/worker/deploy/source.py +++ b/config/worker/deploy/source.py @@ -9,7 +9,9 @@ import worker.deploy import worker.utils -def _package(builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False) -> None: +def _package( + builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False +) -> None: 
os.chdir(builder.code_path) if needs_complete: worker.utils.call(["make", "source_archive_complete"]) diff --git a/config/worker/deploy/steam.py b/config/worker/deploy/steam.py index fa96bfe..45da4af 100644 --- a/config/worker/deploy/steam.py +++ b/config/worker/deploy/steam.py @@ -13,7 +13,9 @@ import worker.utils def extract_file( - builder: worker.deploy.CodeStoreBuilder, source_file_path: pathlib.Path, platform: str + builder: worker.deploy.CodeStoreBuilder, + source_file_path: pathlib.Path, + platform: str, ) -> None: worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam") if not source_file_path.exists(): @@ -33,7 +35,9 @@ def extract_file( # Move any folder there as ./content for source_content_path in dest_extract_path.iterdir(): if source_content_path.is_dir(): - worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]") + worker.utils.info( + f"Move [{source_content_path.name}] -> [{dest_content_path}]" + ) worker.utils.move(source_content_path, dest_content_path) break @@ -55,8 +59,12 @@ def extract_file( worker.utils.remove_file(image_file_path) - worker.utils.info(f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]") - worker.utils.move(source_content_path / "Blender.app", dest_content_path / "Blender.app") + worker.utils.info( + f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]" + ) + worker.utils.move( + source_content_path / "Blender.app", dest_content_path / "Blender.app" + ) worker.utils.remove_dir(source_content_path) elif platform == "windows": worker.utils.info(f"Extracting zip file [{source_file_path}]") @@ -66,7 +74,9 @@ def extract_file( # Move any folder there as ./content for source_content_path in dest_extract_path.iterdir(): if source_content_path.is_dir(): - worker.utils.info(f"Move [{source_content_path.name}] -> [{dest_content_path}]") + worker.utils.info( + f"Move [{source_content_path.name}] -> [{dest_content_path}]" + ) worker.utils.move(source_content_path, dest_content_path) break else: @@ -97,9 +107,10 @@ def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None: version_info = worker.blender.version.VersionInfo(builder) branches_config = builder.get_branches_config() is_lts = builder.track_id in branches_config.all_lts_tracks - is_latest = branches_config.track_major_minor_versions["vdev"] == version_info.short_version + is_latest = ( + branches_config.track_major_minor_versions["vdev"] == version_info.short_version + ) - track_path = builder.track_path log_path = builder.track_path / "log" worker.utils.remove_dir(log_path) os.makedirs(log_path, exist_ok=True) diff --git a/config/worker/deploy/windows.py b/config/worker/deploy/windows.py index 8bece72..25220c7 100644 --- a/config/worker/deploy/windows.py +++ b/config/worker/deploy/windows.py @@ -52,12 +52,16 @@ def _package_architecture( input_file_path = builder.package_dir / build["file_name"] break if not input_file_path: - raise Exception(f"Windows package not found in [{builder.package_dir}] manifest") + raise Exception( + f"Windows package not found in [{builder.package_dir}] manifest" + ) # Copy all required files into working folder source_path = builder.code_path / "release" / "windows" / "msix" dest_path = builder.store_windows_dir - worker.utils.info(f"Copying [{source_path}] -> [{dest_path}] for windows store packaging") + worker.utils.info( + f"Copying [{source_path}] -> [{dest_path}] for windows store packaging" + ) for source_file in source_path.iterdir(): if source_file.name == 
"README.md": @@ -104,7 +108,9 @@ def package(builder: worker.deploy.CodeStoreBuilder) -> None: raise Exception("Can only run this on Windows, aborting") branches_config = builder.get_branches_config() - expected_platforms = branches_config.code_official_platform_architectures[builder.track_id] + expected_platforms = branches_config.code_official_platform_architectures[ + builder.track_id + ] for expected_platform in expected_platforms: if expected_platform.startswith("windows"): diff --git a/config/worker/doc_api.py b/config/worker/doc_api.py index 06bf744..5cd255f 100755 --- a/config/worker/doc_api.py +++ b/config/worker/doc_api.py @@ -35,7 +35,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None: api_base_url = "https://docs.blender.org/api" api_dump_index_url = f"{api_base_url}/api_dump_index.json" - request = urllib.request.Request(api_dump_index_url, headers={"User-Agent": "Mozilla"}) + request = urllib.request.Request( + api_dump_index_url, headers={"User-Agent": "Mozilla"} + ) response = urllib.request.urlopen(request, timeout=5.0) api_dump_index_text = response.read().decode("utf-8", "ignore") @@ -48,7 +50,9 @@ def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None: api_dump_url = f"{api_base_url}/{version}/api_dump.json" worker.utils.info(f"Download {api_dump_url}") - request = urllib.request.Request(api_dump_url, headers={"User-Agent": "Mozilla"}) + request = urllib.request.Request( + api_dump_url, headers={"User-Agent": "Mozilla"} + ) response = urllib.request.urlopen(request, timeout=5.0) api_dump_text = response.read().decode("utf-8", "ignore") @@ -97,7 +101,10 @@ def compile_doc(builder: DocApiBuilder) -> None: dest_path = api_dump_build_path worker.utils.rsync( - source_path, dest_path, include_paths=api_dump_include_paths, exclude_paths=["*"] + source_path, + dest_path, + include_paths=api_dump_include_paths, + exclude_paths=["*"], ) version = worker.blender.version.VersionInfo(builder).short_version @@ -125,7 +132,9 @@ def compile_doc(builder: DocApiBuilder) -> None: in_path = builder.build_doc_path / "sphinx-in" out_path = builder.build_doc_path / "sphinx-out-html" - worker.utils.call(["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path]) + worker.utils.call( + ["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path] + ) def package(builder: DocApiBuilder) -> None: @@ -177,7 +186,9 @@ def deliver(builder: DocApiBuilder) -> None: # Put API dumps data on the server. 
api_dump_build_path = f"{builder.build_doc_path}/api_dump/" api_dump_dest_path = f"{connect_id}:{remote_path}/" - worker.utils.rsync(api_dump_build_path, api_dump_dest_path, change_modes=change_modes) + worker.utils.rsync( + api_dump_build_path, api_dump_dest_path, change_modes=change_modes + ) # Sync zip package if builder.needs_package_delivery: @@ -189,7 +200,10 @@ def deliver(builder: DocApiBuilder) -> None: source_file_path = builder.build_doc_path / package_file_name dest_file_path = f"{connect_id}:{version_remote_path}/{package_file_name}" worker.utils.rsync( - source_file_path, dest_file_path, exclude_paths=[".doctrees"], change_modes=change_modes + source_file_path, + dest_file_path, + exclude_paths=[".doctrees"], + change_modes=change_modes, ) # Create links @@ -198,16 +212,19 @@ def deliver(builder: DocApiBuilder) -> None: connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] ) worker.utils.call_ssh( - connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "master"] + connect_id, + ["ln", "-svF", remote_path / dev_version, remote_path / "master"], ) worker.utils.call_ssh( connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"] ) worker.utils.call_ssh( - connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"] + connect_id, + ["ln", "-svF", remote_path / latest_version, remote_path / "latest"], ) worker.utils.call_ssh( - connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "current"] + connect_id, + ["ln", "-svF", remote_path / latest_version, remote_path / "current"], ) diff --git a/config/worker/doc_developer.py b/config/worker/doc_developer.py index 50fbd8f..71cd6e8 100755 --- a/config/worker/doc_developer.py +++ b/config/worker/doc_developer.py @@ -29,7 +29,9 @@ def update(builder: DocDeveloperBuilder) -> None: def compile_doc(builder: DocDeveloperBuilder) -> None: os.chdir(builder.track_path) - worker.utils.call_pipenv(["install", "--requirements", builder.code_path / "requirements.txt"]) + worker.utils.call_pipenv( + ["install", "--requirements", builder.code_path / "requirements.txt"] + ) worker.utils.remove_dir(builder.output_path) @@ -48,7 +50,9 @@ def deliver(builder: DocDeveloperBuilder) -> None: remote_path = f"developer.blender.org/webroot/{builder.service_env_id}/docs" connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" - server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path(remote_path) + server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path( + remote_path + ) change_modes = ["D0755", "F0644"] source_path = f"{builder.output_path}/" diff --git a/config/worker/doc_manual.py b/config/worker/doc_manual.py index c15654a..32f7e0d 100755 --- a/config/worker/doc_manual.py +++ b/config/worker/doc_manual.py @@ -35,7 +35,9 @@ class ManualBuilder(worker.utils.Builder): if self.needs_all_locales: locale_path = self.code_path / "locale" locales += [ - item.name for item in locale_path.iterdir() if not item.name.startswith(".") + item.name + for item in locale_path.iterdir() + if not item.name.startswith(".") ] return locales @@ -50,7 +52,9 @@ def update(builder: ManualBuilder) -> None: def check(builder: ManualBuilder) -> None: os.chdir(builder.track_path) - worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]) + worker.utils.call_pipenv( + ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"] + ) os.chdir(builder.code_path) @@ -63,7 +67,9 @@ def 
check(builder: ManualBuilder) -> None: def compile_doc(builder: ManualBuilder) -> None: # Install requirements. os.chdir(builder.track_path) - worker.utils.call_pipenv(["install", "--pre", "--requirements", builder.code_path / "requirements.txt"]) + worker.utils.call_pipenv( + ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"] + ) # Determine format and locales locales = builder.get_locales() @@ -127,8 +133,12 @@ def compile_doc(builder: ManualBuilder) -> None: # Hack appropriate versions.json URL into version_switch.js worker.utils.info("Replacing URL in version_switch.js") - version_switch_file_path = build_output_path / "_static" / "js" / "version_switch.js" - versions_file_url = f"https://docs.blender.org/{builder.service_env_id}/versions.json" + version_switch_file_path = ( + build_output_path / "_static" / "js" / "version_switch.js" + ) + versions_file_url = ( + f"https://docs.blender.org/{builder.service_env_id}/versions.json" + ) version_switch_text = version_switch_file_path.read_text() version_switch_text = version_switch_text.replace( @@ -229,17 +239,24 @@ def deliver(builder: ManualBuilder) -> None: dest_path, exclude_paths=[".doctrees", "blender_manual_*.zip"], delete=True, - delete_path_check=str(version_remote_path) + delete_path_check=str(version_remote_path), ) # Create links if builder.track_id == "vdev": worker.utils.info(f"Creating links for {locale}") worker.utils.call_ssh( - connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] + connect_id, + ["ln", "-svF", remote_path / dev_version, remote_path / "dev"], ) worker.utils.call_ssh( - connect_id, ["ln", "-svF", remote_path / latest_version, remote_path / "latest"] + connect_id, + [ + "ln", + "-svF", + remote_path / latest_version, + remote_path / "latest", + ], ) if builder.needs_package_delivery: @@ -281,7 +298,11 @@ if __name__ == "__main__": parser.add_argument("--needs-all-locales", action="store_true", required=False) parser.add_argument("--needs-package-delivery", action="store_true", required=False) parser.add_argument( - "--doc-format", default="html", type=str, required=False, choices=["html", "epub"] + "--doc-format", + default="html", + type=str, + required=False, + choices=["html", "epub"], ) args = parser.parse_args() diff --git a/config/worker/doc_studio.py b/config/worker/doc_studio.py index 3b6104a..b6afedd 100755 --- a/config/worker/doc_studio.py +++ b/config/worker/doc_studio.py @@ -44,7 +44,9 @@ def deliver(builder: worker.utils.Builder) -> None: change_modes = ["D0755", "F0644"] if builder.service_env_id == "LOCAL" and builder.platform == "darwin": - worker.utils.warning("rsync change_owner not supported on darwin, ignoring for LOCAL") + worker.utils.warning( + "rsync change_owner not supported on darwin, ignoring for LOCAL" + ) change_owner = None else: change_owner = "buildbot:www-data" diff --git a/config/worker/utils.py b/config/worker/utils.py index ba2e77f..74abf32 100644 --- a/config/worker/utils.py +++ b/config/worker/utils.py @@ -102,7 +102,7 @@ def _log_cmd(msg: str) -> None: _warnings += [msg] return - print(msg.encode('ascii', errors='replace').decode('ascii'), flush=True) + print(msg.encode("ascii", errors="replace").decode("ascii"), flush=True) # Command execution @@ -118,7 +118,9 @@ CmdFilterOutput = Optional[Callable[[str], Optional[str]]] CmdEnvironment = Optional[Dict[str, str]] -def _prepare_call(cmd: CmdSequence, dry_run: bool = False) -> Sequence[Union[str, pathlib.Path]]: +def _prepare_call( + cmd: CmdSequence, dry_run: bool = 
False +) -> Sequence[Union[str, pathlib.Path]]: real_cmd: List[Union[str, pathlib.Path]] = [] log_cmd: List[str] = [] @@ -174,9 +176,9 @@ def call( if line: line_str = line.strip("\n\r") if filter_output: - line_str_filter = filter_output(line_str) + filter_output(line_str) else: - line_str_filter = line_str + pass if line_str: _log_cmd(line_str) else: @@ -206,7 +208,9 @@ def check_output(cmd: CmdSequence, exit_on_error: bool = True) -> str: sys.stderr.flush() try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True) + output = subprocess.check_output( + cmd, stderr=subprocess.STDOUT, universal_newlines=True + ) except subprocess.CalledProcessError as e: if exit_on_error: sys.exit(e.returncode) @@ -255,9 +259,8 @@ def rsync( if str(dest_path).find(delete_path_check) == -1: raise Exception("Rsync: remote path must contain '{delete_path_check}'") - info_options = "progress0,flist0,name0,stats2" if show_names: - info_options = "progress0,flist0,name1,stats2" + pass cmd: List[Union[str, pathlib.Path, HiddenArgument]] = [ "rsync", @@ -294,20 +297,27 @@ def move(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) shutil.move(str(path_from), path_to) -def copy_dir(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: +def copy_dir( + path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False +) -> None: if dry_run: return shutil.copytree(path_from, path_to) -def copy_file(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: +def copy_file( + path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False +) -> None: if dry_run: return shutil.copy2(path_from, path_to) def remove_file( - path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False + path: pathlib.Path, + retry_count: int = 3, + retry_wait_time: float = 5.0, + dry_run: bool = False, ) -> None: if not path.exists(): return @@ -316,52 +326,54 @@ def remove_file( return info(f"Removing {path}") - for try_count in range(0, retry_count): + for try_count in range(retry_count): try: - try: - if path.exists(): - path.unlink() - except FileNotFoundError: - pass + if path.exists(): + path.unlink() return - except: + except FileNotFoundError: + # File was already removed by another process. + return + except PermissionError as e: + warning(f"Permission error when removing {path}: {e}") + time.sleep(retry_wait_time) + except OSError as e: + warning(f"OS error when removing {path}: {e}") time.sleep(retry_wait_time) - # Not using missing_ok yet for Python3.6 compatibility. + # Final attempt outside the retry loop try: if path.exists(): path.unlink() except FileNotFoundError: pass + except PermissionError as e: + error(f"Failed to remove {path} due to permission issues: {e}") + except OSError as e: + error(f"Failed to remove {path} after retries due to OS error: {e}") # Retry several times by default, giving it a chance for possible antivirus to release # a lock on files in the build folder. Happened for example with MSI files on Windows. 
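# Both remove_file above and remove_dir below retry because a virus scanner
# on Windows can briefly keep a handle on freshly written files (such as MSI
# packages), making the first unlink or rmtree fail with a permission error.
# The same pattern, reduced to a standalone sketch using only the standard
# library; the helper name retry_rmtree is illustrative, not part of this
# module:

import pathlib
import shutil
import time

def retry_rmtree(path: pathlib.Path, retries: int = 3, wait: float = 5.0) -> None:
    for _ in range(retries):
        try:
            if path.exists():
                shutil.rmtree(path)
            return  # Removed (or never existed); nothing left to retry.
        except PermissionError:
            time.sleep(wait)  # Give the lock holder a chance to release it.
    # Final attempt: let any remaining error propagate to the caller.
    if path.exists():
        shutil.rmtree(path)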
def remove_dir( - path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0, dry_run: bool = False + path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0 ) -> None: - if not path.exists(): - return - if dry_run: - info(f"Removing {path} (dry run)") - return - - info(f"Removing {path}") - for try_count in range(0, retry_count): + for try_count in range(retry_count): try: if path.exists(): shutil.rmtree(path) - return - except: - if platform.system().lower() == "windwos": - # XXX: Windows builder debug. - # Often the `build_package` is failed to be removed because - # of the "Access Denied" error on blender-windows64.msi. - # Run some investigation commands to see what is going on. + return # Successfully removed, no need to retry + except PermissionError as e: + if platform.system().lower() == "windows": + # Debugging access denied errors on Windows if path.name == "build_package": info("Removal of package artifacts folder failed. Investigating...") msi_path = ( - path / "_CPack_Packages" / "Windows" / "WIX" / "blender-windows64.msi" + path + / "_CPack_Packages" + / "Windows" + / "WIX" + / "blender-windows64.msi" ) if msi_path.exists(): info(f"Information about [{msi_path}]") @@ -376,11 +388,23 @@ def remove_dir( ) else: info(f"MSI package file [{msi_path}] does not exist") - + warning(f"Permission error when removing {path}: {e}") + time.sleep(retry_wait_time) + except FileNotFoundError: + # The directory is already gone; no action needed. + return + except OSError as e: + warning(f"OS error when attempting to remove {path}: {e}") time.sleep(retry_wait_time) + # Final attempt outside of retries if path.exists(): - shutil.rmtree(path) + try: + shutil.rmtree(path) + except PermissionError as e: + error(f"Failed to remove {path} due to permission issues: {e}") + except OSError as e: + error(f"Failed to remove {path} after retries due to OS error: {e}") def is_tool(name: Union[str, pathlib.Path]) -> bool: @@ -409,7 +433,9 @@ def update_source( warning("Removing git lock, probably left behind by killed git process") remove_file(index_lock_path) for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"): - warning("Removing submodule git lock, probably left behind by killed git process") + warning( + "Removing submodule git lock, probably left behind by killed git process" + ) remove_file(index_lock_path) os.chdir(code_path) @@ -438,7 +464,15 @@ def update_source( # Checkout pull request into PR123 branch. 
call(["git", "checkout", "main"]) - call(["git", "fetch", "-f", "origin", f"pull/{pull_request_id}/head:{branch_name}"]) + call( + [ + "git", + "fetch", + "-f", + "origin", + f"pull/{pull_request_id}/head:{branch_name}", + ] + ) call(["git", "checkout", branch_name]) if commit_id and (commit_id != "HEAD"): -- 2.45.2 From 77ae214d24764eed2938b3a213e720fa5cbe40fd Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Wed, 20 Nov 2024 02:29:30 +0100 Subject: [PATCH 07/13] Change environment variable setup --- .env.example | 3 -- .env.local | 4 +++ .gitignore | 3 +- config/conf/auth.py | 14 +++++----- config/conf/local/auth.py | 2 +- config/conf/local/machines.py | 2 +- config/conf/machines.py | 14 +++++----- config/conf/worker.py | 4 +-- config/gitea/blender.py | 6 ++-- config/pipeline/__init__.py | 4 +-- config/pipeline/code.py | 24 ++++++++-------- config/pipeline/code_benchmark.py | 10 +++---- config/pipeline/code_bpy_deploy.py | 4 +-- config/pipeline/code_deploy.py | 4 +-- config/pipeline/code_store.py | 12 ++++---- config/pipeline/common.py | 22 +++++++-------- config/pipeline/doc_api.py | 4 +-- config/pipeline/doc_developer.py | 4 +-- config/pipeline/doc_manual.py | 4 +-- config/pipeline/doc_studio.py | 4 +-- config/setup.py | 44 ++++++++++++------------------ docker-compose.override.yml | 23 ++++++++++++++-- docker-compose.yml | 20 +------------- 23 files changed, 113 insertions(+), 122 deletions(-) delete mode 100644 .env.example create mode 100644 .env.local diff --git a/.env.example b/.env.example deleted file mode 100644 index fd89b15..0000000 --- a/.env.example +++ /dev/null @@ -1,3 +0,0 @@ -SERVICE_USER_POSTGRESQL=buildbot -SERVICE_PASSWORD_POSTGRESQL=changeme! -BUILDBOT_CONFIG_URL='' \ No newline at end of file diff --git a/.env.local b/.env.local new file mode 100644 index 0000000..0cdf0de --- /dev/null +++ b/.env.local @@ -0,0 +1,4 @@ +SERVICE_USER_POSTGRESQL=buildbot +SERVICE_PASSWORD_POSTGRESQL=changeme! 
+GITEA_URL=https://projects.blender.org +BUILDBOT_WEB_URL=http://localhost:8010/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index c2eabec..86e252f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ .venv -.env \ No newline at end of file +.env.staging +.env.production \ No newline at end of file diff --git a/config/conf/auth.py b/config/conf/auth.py index 8d67280..271d8e7 100644 --- a/config/conf/auth.py +++ b/config/conf/auth.py @@ -7,8 +7,8 @@ import importlib import buildbot.plugins -def _get_auth_config(devops_env_id: str): - if devops_env_id == "LOCAL": +def _get_auth_config(ENVIRONMENT: str): + if ENVIRONMENT == "LOCAL": import conf.local.auth importlib.reload(conf.local.auth) @@ -20,13 +20,13 @@ def _get_auth_config(devops_env_id: str): return conf.production.auth -def fetch_authentication(devops_env_id: str): - auth_config = _get_auth_config(devops_env_id) - return auth_config.get_authentication(devops_env_id) +def fetch_authentication(ENVIRONMENT: str): + auth_config = _get_auth_config(ENVIRONMENT) + return auth_config.get_authentication(ENVIRONMENT) -def fetch_authorization(devops_env_id: str): - auth_config = _get_auth_config(devops_env_id) +def fetch_authorization(ENVIRONMENT: str): + auth_config = _get_auth_config(ENVIRONMENT) admin_usernames = auth_config.admin_usernames deploy_dev_usernames = auth_config.deploy_dev_usernames diff --git a/config/conf/local/auth.py b/config/conf/local/auth.py index b677be9..0c1eb51 100644 --- a/config/conf/local/auth.py +++ b/config/conf/local/auth.py @@ -20,7 +20,7 @@ trusted_dev_usernames = [ ] -def get_authentication(devops_env_id: str): +def get_authentication(ENVIRONMENT: str): class LocalEnvAuth(buildbot.plugins.util.CustomAuth): def check_credentials(self, user, password): return user.decode() == "admin" and password.decode() == "admin" diff --git a/config/conf/local/machines.py b/config/conf/local/machines.py index 9087e67..fb430e7 100644 --- a/config/conf/local/machines.py +++ b/config/conf/local/machines.py @@ -27,5 +27,5 @@ def get_worker_password(worker_name: str) -> str: return "localhost" -def get_worker_names(devops_env_id: str): +def get_worker_names(ENVIRONMENT: str): return _worker_names diff --git a/config/conf/machines.py b/config/conf/machines.py index 55b1aa5..4c36e83 100644 --- a/config/conf/machines.py +++ b/config/conf/machines.py @@ -5,8 +5,8 @@ import importlib -def _get_config(devops_env_id: str): - if devops_env_id == "LOCAL": +def _get_config(ENVIRONMENT: str): + if ENVIRONMENT == "LOCAL": import conf.local.machines importlib.reload(conf.local.machines) @@ -18,13 +18,13 @@ def _get_config(devops_env_id: str): return conf.production.machines -def fetch_platform_worker_names(devops_env_id: str): - machines_config = _get_config(devops_env_id) - return machines_config.get_worker_names(devops_env_id) +def fetch_platform_worker_names(ENVIRONMENT: str): + machines_config = _get_config(ENVIRONMENT) + return machines_config.get_worker_names(ENVIRONMENT) -def get_worker_password(devops_env_id: str, worker_name: str) -> str: - machines_config = _get_config(devops_env_id) +def get_worker_password(ENVIRONMENT: str, worker_name: str) -> str: + machines_config = _get_config(ENVIRONMENT) return machines_config.get_worker_password(worker_name) diff --git a/config/conf/worker.py b/config/conf/worker.py index 963becf..60adb0c 100644 --- a/config/conf/worker.py +++ b/config/conf/worker.py @@ -7,8 +7,8 @@ import importlib from typing import Any -def get_config(devops_env_id: str) -> Any: - if devops_env_id 
== "LOCAL": +def get_config(ENVIRONMENT: str) -> Any: + if ENVIRONMENT == "LOCAL": import conf.local.worker importlib.reload(conf.local.worker) diff --git a/config/gitea/blender.py b/config/gitea/blender.py index 4bd27c1..4d0f1c4 100644 --- a/config/gitea/blender.py +++ b/config/gitea/blender.py @@ -22,12 +22,12 @@ gitea_api_token = None gitea_status_service = None -def setup_service(devops_env_id: str): +def setup_service(ENVIRONMENT: str): import conf.worker importlib.reload(conf.worker) - worker_config = conf.worker.get_config(devops_env_id) - gitea_api_token = worker_config.gitea_api_token(devops_env_id) + worker_config = conf.worker.get_config(ENVIRONMENT) + gitea_api_token = worker_config.gitea_api_token(ENVIRONMENT) if gitea_api_token: log.msg("Found Gitea API token, enabling status push") diff --git a/config/pipeline/__init__.py b/config/pipeline/__init__.py index 5ee14bc..e262cf0 100644 --- a/config/pipeline/__init__.py +++ b/config/pipeline/__init__.py @@ -23,7 +23,7 @@ importlib.reload(pipeline.common) importlib.reload(conf.branches) -def populate(devops_env_id): +def populate(ENVIRONMENT): pipelines_modules = [ pipeline.code, pipeline.code_benchmark, @@ -41,7 +41,7 @@ def populate(devops_env_id): for pipelines_module in pipelines_modules: importlib.reload(pipelines_module) - b, s = pipelines_module.populate(devops_env_id) + b, s = pipelines_module.populate(ENVIRONMENT) builders += b schedulers += s diff --git a/config/pipeline/code.py b/config/pipeline/code.py index 9f42b5b..68e33ea 100644 --- a/config/pipeline/code.py +++ b/config/pipeline/code.py @@ -233,7 +233,7 @@ scheduler_properties = { @buildbot.plugins.util.renderer def create_code_worker_command_args( - props, devops_env_id, track_id, pipeline_type, step_name + props, ENVIRONMENT, track_id, pipeline_type, step_name ): commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD") patch_id = pipeline.common.fetch_property(props, key="patch_id", default="") @@ -294,9 +294,7 @@ def create_code_worker_command_args( args += [step_name] - return pipeline.common.create_worker_command( - "code.py", devops_env_id, track_id, args - ) + return pipeline.common.create_worker_command("code.py", ENVIRONMENT, track_id, args) def needs_do_code_pipeline_step(step): @@ -451,17 +449,17 @@ class PlatformTrigger(plugins_steps.Trigger): return schedulers -def populate(devops_env_id): +def populate(ENVIRONMENT): builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) local_worker_names = conf.machines.fetch_local_worker_names() - worker_config = conf.worker.get_config(devops_env_id) + worker_config = conf.worker.get_config(ENVIRONMENT) - needs_incremental_schedulers = devops_env_id in ["PROD"] - needs_nightly_schedulers = devops_env_id in ["PROD"] + needs_incremental_schedulers = ENVIRONMENT in ["PROD"] + needs_nightly_schedulers = ENVIRONMENT in ["PROD"] print("*** Creating [code] pipeline") for track_id in code_track_ids: @@ -493,7 +491,7 @@ def populate(devops_env_id): step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds step_command = create_code_worker_command_args.withArgs( - devops_env_id, track_id, pipeline_type, step_name + ENVIRONMENT, track_id, pipeline_type, step_name ) step = buildbot.plugins.steps.ShellCommand( @@ -512,7 +510,7 @@ def populate(devops_env_id): for master_step_name in pipeline.common.code_pipeline_master_step_names: master_step_command = ( 
pipeline.common.create_master_command_args.withArgs( - devops_env_id, + ENVIRONMENT, track_id, pipeline_type, master_step_name, @@ -536,7 +534,7 @@ def populate(devops_env_id): pipeline_lint_factory = buildbot.plugins.util.BuildFactory() for step_name in code_pipeline_lint_step_names: step_command = create_code_worker_command_args.withArgs( - devops_env_id, track_id, pipeline_type, step_name + ENVIRONMENT, track_id, pipeline_type, step_name ) pipeline_lint_factory.addStep( @@ -576,7 +574,7 @@ def populate(devops_env_id): suitable_pipeline_worker_names = pipeline_worker_names if ( platform_architecture == "linux-x86_64" - and devops_env_id != "LOCAL" + and ENVIRONMENT != "LOCAL" ): selector = "rocky" suitable_pipeline_worker_names = [ diff --git a/config/pipeline/code_benchmark.py b/config/pipeline/code_benchmark.py index ab695d0..55aa315 100644 --- a/config/pipeline/code_benchmark.py +++ b/config/pipeline/code_benchmark.py @@ -26,8 +26,8 @@ class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): return -def create_deliver_step(devops_env_id): - worker_config = conf.worker.get_config(devops_env_id) +def create_deliver_step(ENVIRONMENT): + worker_config = conf.worker.get_config(ENVIRONMENT) file_size_in_mb = 500 * 1024 * 1024 worker_source_path = pathlib.Path("../../../../git/blender-vdev/build_package") @@ -48,7 +48,7 @@ def create_deliver_step(devops_env_id): ) -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.StringParameter( name="commit_id", @@ -68,7 +68,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "code-benchmark", "code_benchmark.py", [ @@ -78,7 +78,7 @@ def populate(devops_env_id): "compile-gpu", "compile-install", "benchmark", - partial(create_deliver_step, devops_env_id), + partial(create_deliver_step, ENVIRONMENT), "clean", ], {"vdev": "main"}, diff --git a/config/pipeline/code_bpy_deploy.py b/config/pipeline/code_bpy_deploy.py index b5e5f48..b5e94d0 100644 --- a/config/pipeline/code_bpy_deploy.py +++ b/config/pipeline/code_bpy_deploy.py @@ -9,11 +9,11 @@ import conf.branches import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "code-bpy-deploy", "code_bpy_deploy.py", [ diff --git a/config/pipeline/code_deploy.py b/config/pipeline/code_deploy.py index cb89aba..d4c9d8f 100644 --- a/config/pipeline/code_deploy.py +++ b/config/pipeline/code_deploy.py @@ -10,7 +10,7 @@ import conf.branches import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_full_clean", @@ -22,7 +22,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "code-artifacts-deploy", "code_deploy.py", [ diff --git a/config/pipeline/code_store.py b/config/pipeline/code_store.py index 64df456..c4d2f15 100644 --- a/config/pipeline/code_store.py +++ b/config/pipeline/code_store.py @@ -62,16 +62,16 @@ def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type) ) -def populate(devops_env_id): +def populate(ENVIRONMENT): builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) local_worker_names = conf.machines.fetch_local_worker_names() - worker_config = conf.worker.get_config(devops_env_id) + 
worker_config = conf.worker.get_config(ENVIRONMENT) - needs_nightly_schedulers = devops_env_id == "PROD" + needs_nightly_schedulers = ENVIRONMENT == "PROD" pipeline_type = "daily" @@ -108,7 +108,7 @@ def populate(devops_env_id): else: args = ["--store-id", store_id, step_name] step_command = pipeline.common.create_worker_command( - "code_store.py", devops_env_id, track_id, args + "code_store.py", ENVIRONMENT, track_id, args ) step = plugins_steps.ShellCommand( @@ -126,7 +126,7 @@ def populate(devops_env_id): for master_step_name in pipeline.common.code_pipeline_master_step_names: master_step_command = ( pipeline.common.create_master_command_args.withArgs( - devops_env_id, + ENVIRONMENT, track_id, pipeline_type, master_step_name, diff --git a/config/pipeline/common.py b/config/pipeline/common.py index 14bb9fc..98b92bf 100644 --- a/config/pipeline/common.py +++ b/config/pipeline/common.py @@ -57,7 +57,7 @@ def needs_do_doc_pipeline_step(step): return True -def create_worker_command(script, devops_env_id, track_id, args): +def create_worker_command(script, ENVIRONMENT, track_id, args): # This relative path assume were are in: # ~/.devops/services/buildbot-worker//build # There appears to be no way to expand a tilde here? @@ -71,7 +71,7 @@ def create_worker_command(script, devops_env_id, track_id, args): "--track-id", track_id, "--service-env-id", - devops_env_id, + ENVIRONMENT, ] return cmd + list(args) @@ -79,7 +79,7 @@ def create_worker_command(script, devops_env_id, track_id, args): @buildbot.plugins.util.renderer def create_master_command_args( - props, devops_env_id, track_id, pipeline_type, step_name, single_platform + props, ENVIRONMENT, track_id, pipeline_type, step_name, single_platform ): build_configuration = fetch_property( props, key="build_configuration", default="release" @@ -116,7 +116,7 @@ def create_master_command_args( "--track-id", track_id, "--service-env-id", - devops_env_id, + ENVIRONMENT, ] return cmd + list(args) @@ -125,7 +125,7 @@ def create_master_command_args( @buildbot.plugins.util.renderer def create_pipeline_worker_command( props, - devops_env_id, + ENVIRONMENT, track_id, script, step_name, @@ -154,11 +154,11 @@ def create_pipeline_worker_command( if "revision" in props and props["revision"]: args += ["--commit-id", props["revision"]] - return create_worker_command(script, devops_env_id, track_id, args) + return create_worker_command(script, ENVIRONMENT, track_id, args) def create_pipeline( - devops_env_id, + ENVIRONMENT, artifact_id, script, steps, @@ -179,13 +179,13 @@ def create_pipeline( builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) local_worker_names = conf.machines.fetch_local_worker_names() needs_incremental_schedulers = ( - incremental_properties is not None and devops_env_id in ["PROD"] + incremental_properties is not None and ENVIRONMENT in ["PROD"] ) - needs_nightly_schedulers = nightly_properties is not None and devops_env_id in [ + needs_nightly_schedulers = nightly_properties is not None and ENVIRONMENT in [ "PROD" ] track_ids = tracked_branch_ids.keys() @@ -210,7 +210,7 @@ def create_pipeline( continue step_command = create_pipeline_worker_command.withArgs( - devops_env_id, + ENVIRONMENT, track_id, script, step, diff --git a/config/pipeline/doc_api.py b/config/pipeline/doc_api.py index 09c1239..0ff2b07 100644 --- a/config/pipeline/doc_api.py +++ b/config/pipeline/doc_api.py @@ -8,7 +8,7 @@ import 
conf.branches import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_full_clean", @@ -27,7 +27,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "doc-api", "doc_api.py", [ diff --git a/config/pipeline/doc_developer.py b/config/pipeline/doc_developer.py index 2333f98..a7f03b2 100644 --- a/config/pipeline/doc_developer.py +++ b/config/pipeline/doc_developer.py @@ -7,7 +7,7 @@ import buildbot.plugins import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -19,7 +19,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "doc-developer", "doc_developer.py", ["update", "compile", "deliver"], diff --git a/config/pipeline/doc_manual.py b/config/pipeline/doc_manual.py index 9b846c4..4743689 100644 --- a/config/pipeline/doc_manual.py +++ b/config/pipeline/doc_manual.py @@ -8,7 +8,7 @@ import conf.branches import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -27,7 +27,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "doc-manual", "doc_manual.py", ["configure-machine", "update", "compile", "package", "deliver", "clean"], diff --git a/config/pipeline/doc_studio.py b/config/pipeline/doc_studio.py index 279b8f6..cc90408 100644 --- a/config/pipeline/doc_studio.py +++ b/config/pipeline/doc_studio.py @@ -7,7 +7,7 @@ import buildbot.plugins import pipeline.common -def populate(devops_env_id): +def populate(ENVIRONMENT): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -19,7 +19,7 @@ def populate(devops_env_id): ] return pipeline.common.create_pipeline( - devops_env_id, + ENVIRONMENT, "doc-studio-tools", "doc_studio.py", ["update", "compile", "deliver"], diff --git a/config/setup.py b/config/setup.py index 661c973..abf0730 100644 --- a/config/setup.py +++ b/config/setup.py @@ -31,12 +31,11 @@ importlib.reload(conf.worker) importlib.reload(gitea.blender) importlib.reload(pipeline) -devops_env_id = os.environ.get("DEVOPS_ENV_ID", default="LOCAL") -devops_host_id = os.environ.get("DEVOPS_HOST_ID", default="localhost") +ENVIRONMENT = os.environ.get("ENVIRONMENT", default="LOCAL") def setup() -> Dict[str, Any]: - ####### MAIN - configuration + ####### CONFIGURATION c = {} # Change Source @@ -44,7 +43,7 @@ def setup() -> Dict[str, Any]: # Workers print("*** Creating platform workers") - platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) + platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) workers: List[buildbot.plugins.worker.Worker] = [] configured_worker_names = set() for worker_names in platform_worker_names.values(): @@ -56,7 +55,7 @@ def setup() -> Dict[str, Any]: workers += [ buildbot.plugins.worker.Worker( worker_name, - conf.machines.get_worker_password(devops_env_id, worker_name), + conf.machines.get_worker_password(ENVIRONMENT, worker_name), max_builds=1, keepalive_interval=3600, ) @@ -70,7 +69,7 @@ def setup() -> Dict[str, Any]: c["workers"] = workers # Builders and Schedulers - builders, schedulers = pipeline.populate(devops_env_id) + builders, schedulers = pipeline.populate(ENVIRONMENT) c["builders"] = builders 
c["schedulers"] = schedulers @@ -80,7 +79,7 @@ def setup() -> Dict[str, Any]: # status of each build will be pushed to these targets. buildbot/reporters/*.py # has a variety to choose from, like IRC bots. - gitea_status_service = gitea.blender.setup_service(devops_env_id) + gitea_status_service = gitea.blender.setup_service(ENVIRONMENT) if gitea_status_service: c["services"] = [gitea_status_service] else: @@ -91,42 +90,33 @@ def setup() -> Dict[str, Any]: # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). - c["title"] = f"Bot - {devops_env_id}" + c["title"] = f"Blender Buildbot - {ENVIRONMENT}" c["titleURL"] = "https://projects.blender.org" # the 'buildbotURL' string should point to the location where the buildbot's # internal web server is visible. This typically uses the port number set in # the 'www' entry below, but with an externally-visible host name which the # buildbot cannot figure out without some help. - c["buildbotURL"] = f"http://{devops_host_id}:8010/" - - if devops_env_id != "LOCAL": - c["buildbotURL"] = f"http://{devops_host_id}:8000/admin/" - - if devops_env_id == "PROD": - c["buildbotURL"] = "https://builder.blender.org/admin/" - if devops_env_id == "UATEST": - c["buildbotURL"] = "https://builder.uatest.blender.org/admin/" + c["buildbotURL"] = os.environ.get("BUILDBOT_WEB_URL", "http://localhost:8010/") # Minimalistic config to activate new web UI c["www"] = dict( - port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={}) + port=os.environ.get("BUILDBOT_WEB_PORT", 8010), + plugins=dict(waterfall_view={}, console_view={}, grid_view={}), ) # Database - if devops_env_id == "LOCAL": - c["db"] = {"db_url": "sqlite:///state.sqlite"} - else: - # PostgreSQL database, as recommended for production environment. 
- c["db"] = {"db_url": "postgresql://buildbot@127.0.0.1/buildbot"} + c["db"] = { + "db_url": os.environ.get("BUILDBOT_DB_URL", "sqlite://").format(**os.environ) + } c["buildbotNetUsageData"] = None # Authentication - c["www"]["auth"] = conf.auth.fetch_authentication(devops_env_id) + c["www"]["auth"] = conf.auth.fetch_authentication(ENVIRONMENT) # Authorization - c["www"]["authz"] = conf.auth.fetch_authorization(devops_env_id) + c["www"]["authz"] = conf.auth.fetch_authorization(ENVIRONMENT) # Disable UI - does not work c["www"]["plugins"] = { @@ -160,8 +150,8 @@ def setup() -> Dict[str, Any]: r"https://projects.blender.org/\1/\2/commit/%s", ) - # Port for workers to connectto - c["protocols"] = {"pb": {"port": 9989}} + # Port for workers to connect to + c["protocols"] = {"pb": {"port": os.environ.get("BUILDBOT_WORKER_PORT", 9989)}} # Disable collapsing requests c["collapseRequests"] = False diff --git a/docker-compose.override.yml b/docker-compose.override.yml index cae2114..103aa33 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -1,5 +1,24 @@ services: buildbot-master: - env_file: .env + env_file: .env.local volumes: - - ./config:/buildbot/config \ No newline at end of file + - ./config:/buildbot/config + + buildbot-worker: + image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + restart: unless-stopped + environment: + - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' + - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' + - 'WORKERNAME=${WORKERNAME:-example-worker}' + - 'WORKERPASS=${WORKERPASS:-pass}' + - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' + healthcheck: + test: + - CMD + - curl + - '-f' + - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' + interval: 5s + timeout: 20s + retries: 10 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 3db7be5..190b123 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,7 +18,7 @@ services: - CMD - curl - '-f' - - 'http://localhost:$${BUILDBOT_WEB_PORT}' + - '$${BUILDBOT_WEB_URL}' interval: 2s timeout: 10s retries: 15 @@ -41,23 +41,5 @@ services: - 'POSTGRES_PASSWORD=${SERVICE_PASSWORD_POSTGRESQL}' - 'POSTGRES_USER=${SERVICE_USER_POSTGRESQL}' - 'POSTGRES_DB=${POSTGRES_DB:-buildbot}' - buildbot-worker: - image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' - restart: unless-stopped - environment: - - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' - - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - - 'WORKERNAME=${WORKERNAME:-example-worker}' - - 'WORKERPASS=${WORKERPASS:-pass}' - - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' - healthcheck: - test: - - CMD - - curl - - '-f' - - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' - interval: 5s - timeout: 20s - retries: 10 volumes: buildbot-db: {} \ No newline at end of file -- 2.45.2 From 5cc9d7b0e98dc2faa14f96ef004b01b791eafdc3 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Wed, 20 Nov 2024 16:02:13 +0100 Subject: [PATCH 08/13] Get back to original --- .env.local | 5 ++- .gitignore | 28 +++++++++++- Dockerfile | 0 {config => buildbot/config}/conf/__init__.py | 0 {config => buildbot/config}/conf/auth.py | 0 {config => buildbot/config}/conf/branches.py | 0 .../config}/conf/local/__init__.py | 0 .../config}/conf/local/auth.py | 0 .../config}/conf/local/machines.py | 0 .../config}/conf/local/worker.py | 0 {config => 
buildbot/config}/conf/machines.py | 0 {config => buildbot/config}/conf/worker.py | 0 {config => buildbot/config}/gitea/LICENSE | 0 {config => buildbot/config}/gitea/README.md | 0 {config => buildbot/config}/gitea/__init__.py | 0 {config => buildbot/config}/gitea/blender.py | 0 {config => buildbot/config}/gitea/reporter.py | 0 .../config}/pipeline/__init__.py | 0 {config => buildbot/config}/pipeline/code.py | 0 .../config}/pipeline/code_benchmark.py | 0 .../config}/pipeline/code_bpy_deploy.py | 0 .../config}/pipeline/code_deploy.py | 0 .../config}/pipeline/code_store.py | 0 .../config}/pipeline/common.py | 0 .../config}/pipeline/doc_api.py | 0 .../config}/pipeline/doc_developer.py | 0 .../config}/pipeline/doc_manual.py | 0 .../config}/pipeline/doc_studio.py | 0 {config => buildbot/config}/setup.py | 0 .../config}/worker/__init__.py | 0 {config => buildbot/config}/worker/archive.py | 0 .../config}/worker/blender/__init__.py | 0 .../config}/worker/blender/benchmark.py | 0 .../worker/blender/blender.applescript | 0 .../config}/worker/blender/bundle_dmg.py | 0 .../config}/worker/blender/compile.py | 0 .../config}/worker/blender/cpack_post.cmake | 0 .../config}/worker/blender/cpack_post.py | 0 .../config}/worker/blender/lint.py | 0 .../config}/worker/blender/msix_package.py | 0 .../config}/worker/blender/pack.py | 0 .../config}/worker/blender/sign.py | 0 .../config}/worker/blender/test.py | 0 .../config}/worker/blender/update.py | 0 .../config}/worker/blender/version.py | 0 {config => buildbot/config}/worker/code.py | 0 .../config}/worker/code_benchmark.py | 0 .../config}/worker/code_bpy_deploy.py | 0 .../config}/worker/code_deploy.py | 0 .../config}/worker/code_store.py | 0 .../config}/worker/configure.py | 0 .../config}/worker/deploy/__init__.py | 0 .../config}/worker/deploy/artifacts.py | 0 .../config}/worker/deploy/monitor.py | 0 .../config}/worker/deploy/pypi.py | 0 .../config}/worker/deploy/snap.py | 0 .../config}/worker/deploy/source.py | 0 .../config}/worker/deploy/steam.py | 0 .../config}/worker/deploy/windows.py | 0 {config => buildbot/config}/worker/doc_api.py | 0 .../config}/worker/doc_developer.py | 0 .../config}/worker/doc_manual.py | 0 .../config}/worker/doc_studio.py | 0 {config => buildbot/config}/worker/utils.py | 0 buildbot/master.cfg | 15 +++++++ config/master.cfg | 8 ---- docker-compose.override.yml | 24 ---------- docker-compose.yml | 45 ++++++++++++++++--- 68 files changed, 83 insertions(+), 42 deletions(-) delete mode 100644 Dockerfile rename {config => buildbot/config}/conf/__init__.py (100%) rename {config => buildbot/config}/conf/auth.py (100%) rename {config => buildbot/config}/conf/branches.py (100%) rename {config => buildbot/config}/conf/local/__init__.py (100%) rename {config => buildbot/config}/conf/local/auth.py (100%) rename {config => buildbot/config}/conf/local/machines.py (100%) rename {config => buildbot/config}/conf/local/worker.py (100%) rename {config => buildbot/config}/conf/machines.py (100%) rename {config => buildbot/config}/conf/worker.py (100%) rename {config => buildbot/config}/gitea/LICENSE (100%) rename {config => buildbot/config}/gitea/README.md (100%) rename {config => buildbot/config}/gitea/__init__.py (100%) rename {config => buildbot/config}/gitea/blender.py (100%) rename {config => buildbot/config}/gitea/reporter.py (100%) rename {config => buildbot/config}/pipeline/__init__.py (100%) rename {config => buildbot/config}/pipeline/code.py (100%) rename {config => buildbot/config}/pipeline/code_benchmark.py (100%) rename {config => 
buildbot/config}/pipeline/code_bpy_deploy.py (100%) rename {config => buildbot/config}/pipeline/code_deploy.py (100%) rename {config => buildbot/config}/pipeline/code_store.py (100%) rename {config => buildbot/config}/pipeline/common.py (100%) rename {config => buildbot/config}/pipeline/doc_api.py (100%) rename {config => buildbot/config}/pipeline/doc_developer.py (100%) rename {config => buildbot/config}/pipeline/doc_manual.py (100%) rename {config => buildbot/config}/pipeline/doc_studio.py (100%) rename {config => buildbot/config}/setup.py (100%) rename {config => buildbot/config}/worker/__init__.py (100%) rename {config => buildbot/config}/worker/archive.py (100%) rename {config => buildbot/config}/worker/blender/__init__.py (100%) rename {config => buildbot/config}/worker/blender/benchmark.py (100%) rename {config => buildbot/config}/worker/blender/blender.applescript (100%) rename {config => buildbot/config}/worker/blender/bundle_dmg.py (100%) rename {config => buildbot/config}/worker/blender/compile.py (100%) rename {config => buildbot/config}/worker/blender/cpack_post.cmake (100%) rename {config => buildbot/config}/worker/blender/cpack_post.py (100%) rename {config => buildbot/config}/worker/blender/lint.py (100%) rename {config => buildbot/config}/worker/blender/msix_package.py (100%) rename {config => buildbot/config}/worker/blender/pack.py (100%) rename {config => buildbot/config}/worker/blender/sign.py (100%) rename {config => buildbot/config}/worker/blender/test.py (100%) rename {config => buildbot/config}/worker/blender/update.py (100%) rename {config => buildbot/config}/worker/blender/version.py (100%) rename {config => buildbot/config}/worker/code.py (100%) rename {config => buildbot/config}/worker/code_benchmark.py (100%) rename {config => buildbot/config}/worker/code_bpy_deploy.py (100%) rename {config => buildbot/config}/worker/code_deploy.py (100%) rename {config => buildbot/config}/worker/code_store.py (100%) rename {config => buildbot/config}/worker/configure.py (100%) rename {config => buildbot/config}/worker/deploy/__init__.py (100%) rename {config => buildbot/config}/worker/deploy/artifacts.py (100%) rename {config => buildbot/config}/worker/deploy/monitor.py (100%) rename {config => buildbot/config}/worker/deploy/pypi.py (100%) rename {config => buildbot/config}/worker/deploy/snap.py (100%) rename {config => buildbot/config}/worker/deploy/source.py (100%) rename {config => buildbot/config}/worker/deploy/steam.py (100%) rename {config => buildbot/config}/worker/deploy/windows.py (100%) rename {config => buildbot/config}/worker/doc_api.py (100%) rename {config => buildbot/config}/worker/doc_developer.py (100%) rename {config => buildbot/config}/worker/doc_manual.py (100%) rename {config => buildbot/config}/worker/doc_studio.py (100%) rename {config => buildbot/config}/worker/utils.py (100%) create mode 100644 buildbot/master.cfg delete mode 100644 config/master.cfg delete mode 100644 docker-compose.override.yml diff --git a/.env.local b/.env.local index 0cdf0de..8099ed8 100644 --- a/.env.local +++ b/.env.local @@ -1,4 +1,5 @@ -SERVICE_USER_POSTGRESQL=buildbot -SERVICE_PASSWORD_POSTGRESQL=changeme! +POSTGRES_PASSWORD=buildbot +POSTGRES_PASSWORD=changeme! 
+BUILDBOT_CONFIG_URL='' GITEA_URL=https://projects.blender.org BUILDBOT_WEB_URL=http://localhost:8010/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index 86e252f..8a2abc6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,29 @@ +# Hidden files .venv +.env.production .env.staging -.env.production \ No newline at end of file + +# Python +__pycache__ +*.py[cod] +Pipfile +Pipfile.lock + +# Editors +*~ +*.swp +*.swo +*# +TAGS +tags + +# Thumbnails +Thumbs.db +ehthumbs.db +Desktop.ini +.DS_Store + +# Local patches and logs +*.patch +*.diff +*.log \ No newline at end of file diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index e69de29..0000000 diff --git a/config/conf/__init__.py b/buildbot/config/conf/__init__.py similarity index 100% rename from config/conf/__init__.py rename to buildbot/config/conf/__init__.py diff --git a/config/conf/auth.py b/buildbot/config/conf/auth.py similarity index 100% rename from config/conf/auth.py rename to buildbot/config/conf/auth.py diff --git a/config/conf/branches.py b/buildbot/config/conf/branches.py similarity index 100% rename from config/conf/branches.py rename to buildbot/config/conf/branches.py diff --git a/config/conf/local/__init__.py b/buildbot/config/conf/local/__init__.py similarity index 100% rename from config/conf/local/__init__.py rename to buildbot/config/conf/local/__init__.py diff --git a/config/conf/local/auth.py b/buildbot/config/conf/local/auth.py similarity index 100% rename from config/conf/local/auth.py rename to buildbot/config/conf/local/auth.py diff --git a/config/conf/local/machines.py b/buildbot/config/conf/local/machines.py similarity index 100% rename from config/conf/local/machines.py rename to buildbot/config/conf/local/machines.py diff --git a/config/conf/local/worker.py b/buildbot/config/conf/local/worker.py similarity index 100% rename from config/conf/local/worker.py rename to buildbot/config/conf/local/worker.py diff --git a/config/conf/machines.py b/buildbot/config/conf/machines.py similarity index 100% rename from config/conf/machines.py rename to buildbot/config/conf/machines.py diff --git a/config/conf/worker.py b/buildbot/config/conf/worker.py similarity index 100% rename from config/conf/worker.py rename to buildbot/config/conf/worker.py diff --git a/config/gitea/LICENSE b/buildbot/config/gitea/LICENSE similarity index 100% rename from config/gitea/LICENSE rename to buildbot/config/gitea/LICENSE diff --git a/config/gitea/README.md b/buildbot/config/gitea/README.md similarity index 100% rename from config/gitea/README.md rename to buildbot/config/gitea/README.md diff --git a/config/gitea/__init__.py b/buildbot/config/gitea/__init__.py similarity index 100% rename from config/gitea/__init__.py rename to buildbot/config/gitea/__init__.py diff --git a/config/gitea/blender.py b/buildbot/config/gitea/blender.py similarity index 100% rename from config/gitea/blender.py rename to buildbot/config/gitea/blender.py diff --git a/config/gitea/reporter.py b/buildbot/config/gitea/reporter.py similarity index 100% rename from config/gitea/reporter.py rename to buildbot/config/gitea/reporter.py diff --git a/config/pipeline/__init__.py b/buildbot/config/pipeline/__init__.py similarity index 100% rename from config/pipeline/__init__.py rename to buildbot/config/pipeline/__init__.py diff --git a/config/pipeline/code.py b/buildbot/config/pipeline/code.py similarity index 100% rename from config/pipeline/code.py rename to buildbot/config/pipeline/code.py diff --git a/config/pipeline/code_benchmark.py 
b/buildbot/config/pipeline/code_benchmark.py similarity index 100% rename from config/pipeline/code_benchmark.py rename to buildbot/config/pipeline/code_benchmark.py diff --git a/config/pipeline/code_bpy_deploy.py b/buildbot/config/pipeline/code_bpy_deploy.py similarity index 100% rename from config/pipeline/code_bpy_deploy.py rename to buildbot/config/pipeline/code_bpy_deploy.py diff --git a/config/pipeline/code_deploy.py b/buildbot/config/pipeline/code_deploy.py similarity index 100% rename from config/pipeline/code_deploy.py rename to buildbot/config/pipeline/code_deploy.py diff --git a/config/pipeline/code_store.py b/buildbot/config/pipeline/code_store.py similarity index 100% rename from config/pipeline/code_store.py rename to buildbot/config/pipeline/code_store.py diff --git a/config/pipeline/common.py b/buildbot/config/pipeline/common.py similarity index 100% rename from config/pipeline/common.py rename to buildbot/config/pipeline/common.py diff --git a/config/pipeline/doc_api.py b/buildbot/config/pipeline/doc_api.py similarity index 100% rename from config/pipeline/doc_api.py rename to buildbot/config/pipeline/doc_api.py diff --git a/config/pipeline/doc_developer.py b/buildbot/config/pipeline/doc_developer.py similarity index 100% rename from config/pipeline/doc_developer.py rename to buildbot/config/pipeline/doc_developer.py diff --git a/config/pipeline/doc_manual.py b/buildbot/config/pipeline/doc_manual.py similarity index 100% rename from config/pipeline/doc_manual.py rename to buildbot/config/pipeline/doc_manual.py diff --git a/config/pipeline/doc_studio.py b/buildbot/config/pipeline/doc_studio.py similarity index 100% rename from config/pipeline/doc_studio.py rename to buildbot/config/pipeline/doc_studio.py diff --git a/config/setup.py b/buildbot/config/setup.py similarity index 100% rename from config/setup.py rename to buildbot/config/setup.py diff --git a/config/worker/__init__.py b/buildbot/config/worker/__init__.py similarity index 100% rename from config/worker/__init__.py rename to buildbot/config/worker/__init__.py diff --git a/config/worker/archive.py b/buildbot/config/worker/archive.py similarity index 100% rename from config/worker/archive.py rename to buildbot/config/worker/archive.py diff --git a/config/worker/blender/__init__.py b/buildbot/config/worker/blender/__init__.py similarity index 100% rename from config/worker/blender/__init__.py rename to buildbot/config/worker/blender/__init__.py diff --git a/config/worker/blender/benchmark.py b/buildbot/config/worker/blender/benchmark.py similarity index 100% rename from config/worker/blender/benchmark.py rename to buildbot/config/worker/blender/benchmark.py diff --git a/config/worker/blender/blender.applescript b/buildbot/config/worker/blender/blender.applescript similarity index 100% rename from config/worker/blender/blender.applescript rename to buildbot/config/worker/blender/blender.applescript diff --git a/config/worker/blender/bundle_dmg.py b/buildbot/config/worker/blender/bundle_dmg.py similarity index 100% rename from config/worker/blender/bundle_dmg.py rename to buildbot/config/worker/blender/bundle_dmg.py diff --git a/config/worker/blender/compile.py b/buildbot/config/worker/blender/compile.py similarity index 100% rename from config/worker/blender/compile.py rename to buildbot/config/worker/blender/compile.py diff --git a/config/worker/blender/cpack_post.cmake b/buildbot/config/worker/blender/cpack_post.cmake similarity index 100% rename from config/worker/blender/cpack_post.cmake rename to 
buildbot/config/worker/blender/cpack_post.cmake diff --git a/config/worker/blender/cpack_post.py b/buildbot/config/worker/blender/cpack_post.py similarity index 100% rename from config/worker/blender/cpack_post.py rename to buildbot/config/worker/blender/cpack_post.py diff --git a/config/worker/blender/lint.py b/buildbot/config/worker/blender/lint.py similarity index 100% rename from config/worker/blender/lint.py rename to buildbot/config/worker/blender/lint.py diff --git a/config/worker/blender/msix_package.py b/buildbot/config/worker/blender/msix_package.py similarity index 100% rename from config/worker/blender/msix_package.py rename to buildbot/config/worker/blender/msix_package.py diff --git a/config/worker/blender/pack.py b/buildbot/config/worker/blender/pack.py similarity index 100% rename from config/worker/blender/pack.py rename to buildbot/config/worker/blender/pack.py diff --git a/config/worker/blender/sign.py b/buildbot/config/worker/blender/sign.py similarity index 100% rename from config/worker/blender/sign.py rename to buildbot/config/worker/blender/sign.py diff --git a/config/worker/blender/test.py b/buildbot/config/worker/blender/test.py similarity index 100% rename from config/worker/blender/test.py rename to buildbot/config/worker/blender/test.py diff --git a/config/worker/blender/update.py b/buildbot/config/worker/blender/update.py similarity index 100% rename from config/worker/blender/update.py rename to buildbot/config/worker/blender/update.py diff --git a/config/worker/blender/version.py b/buildbot/config/worker/blender/version.py similarity index 100% rename from config/worker/blender/version.py rename to buildbot/config/worker/blender/version.py diff --git a/config/worker/code.py b/buildbot/config/worker/code.py similarity index 100% rename from config/worker/code.py rename to buildbot/config/worker/code.py diff --git a/config/worker/code_benchmark.py b/buildbot/config/worker/code_benchmark.py similarity index 100% rename from config/worker/code_benchmark.py rename to buildbot/config/worker/code_benchmark.py diff --git a/config/worker/code_bpy_deploy.py b/buildbot/config/worker/code_bpy_deploy.py similarity index 100% rename from config/worker/code_bpy_deploy.py rename to buildbot/config/worker/code_bpy_deploy.py diff --git a/config/worker/code_deploy.py b/buildbot/config/worker/code_deploy.py similarity index 100% rename from config/worker/code_deploy.py rename to buildbot/config/worker/code_deploy.py diff --git a/config/worker/code_store.py b/buildbot/config/worker/code_store.py similarity index 100% rename from config/worker/code_store.py rename to buildbot/config/worker/code_store.py diff --git a/config/worker/configure.py b/buildbot/config/worker/configure.py similarity index 100% rename from config/worker/configure.py rename to buildbot/config/worker/configure.py diff --git a/config/worker/deploy/__init__.py b/buildbot/config/worker/deploy/__init__.py similarity index 100% rename from config/worker/deploy/__init__.py rename to buildbot/config/worker/deploy/__init__.py diff --git a/config/worker/deploy/artifacts.py b/buildbot/config/worker/deploy/artifacts.py similarity index 100% rename from config/worker/deploy/artifacts.py rename to buildbot/config/worker/deploy/artifacts.py diff --git a/config/worker/deploy/monitor.py b/buildbot/config/worker/deploy/monitor.py similarity index 100% rename from config/worker/deploy/monitor.py rename to buildbot/config/worker/deploy/monitor.py diff --git a/config/worker/deploy/pypi.py b/buildbot/config/worker/deploy/pypi.py 
similarity index 100% rename from config/worker/deploy/pypi.py rename to buildbot/config/worker/deploy/pypi.py diff --git a/config/worker/deploy/snap.py b/buildbot/config/worker/deploy/snap.py similarity index 100% rename from config/worker/deploy/snap.py rename to buildbot/config/worker/deploy/snap.py diff --git a/config/worker/deploy/source.py b/buildbot/config/worker/deploy/source.py similarity index 100% rename from config/worker/deploy/source.py rename to buildbot/config/worker/deploy/source.py diff --git a/config/worker/deploy/steam.py b/buildbot/config/worker/deploy/steam.py similarity index 100% rename from config/worker/deploy/steam.py rename to buildbot/config/worker/deploy/steam.py diff --git a/config/worker/deploy/windows.py b/buildbot/config/worker/deploy/windows.py similarity index 100% rename from config/worker/deploy/windows.py rename to buildbot/config/worker/deploy/windows.py diff --git a/config/worker/doc_api.py b/buildbot/config/worker/doc_api.py similarity index 100% rename from config/worker/doc_api.py rename to buildbot/config/worker/doc_api.py diff --git a/config/worker/doc_developer.py b/buildbot/config/worker/doc_developer.py similarity index 100% rename from config/worker/doc_developer.py rename to buildbot/config/worker/doc_developer.py diff --git a/config/worker/doc_manual.py b/buildbot/config/worker/doc_manual.py similarity index 100% rename from config/worker/doc_manual.py rename to buildbot/config/worker/doc_manual.py diff --git a/config/worker/doc_studio.py b/buildbot/config/worker/doc_studio.py similarity index 100% rename from config/worker/doc_studio.py rename to buildbot/config/worker/doc_studio.py diff --git a/config/worker/utils.py b/buildbot/config/worker/utils.py similarity index 100% rename from config/worker/utils.py rename to buildbot/config/worker/utils.py diff --git a/buildbot/master.cfg b/buildbot/master.cfg new file mode 100644 index 0000000..028e381 --- /dev/null +++ b/buildbot/master.cfg @@ -0,0 +1,15 @@ +# -*- python -*- +# ex: set filetype=python: +import importlib +import os +import sys + +# Add the "config" directory to the Python path +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "config")) + +# Import the setup module from the "config" directory +import setup +importlib.reload(setup) + +# Use the setup module as intended +BuildmasterConfig = setup.setup() diff --git a/config/master.cfg b/config/master.cfg deleted file mode 100644 index b1e1d40..0000000 --- a/config/master.cfg +++ /dev/null @@ -1,8 +0,0 @@ -# -*- python -*- -# ex: set filetype=python: -import importlib -import os -import sys -import setup -importlib.reload(setup) -BuildmasterConfig = setup.setup() diff --git a/docker-compose.override.yml b/docker-compose.override.yml deleted file mode 100644 index 103aa33..0000000 --- a/docker-compose.override.yml +++ /dev/null @@ -1,24 +0,0 @@ -services: - buildbot-master: - env_file: .env.local - volumes: - - ./config:/buildbot/config - - buildbot-worker: - image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' - restart: unless-stopped - environment: - - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' - - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - - 'WORKERNAME=${WORKERNAME:-example-worker}' - - 'WORKERPASS=${WORKERPASS:-pass}' - - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' - healthcheck: - test: - - CMD - - curl - - '-f' - - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' - interval: 5s - timeout: 20s - retries: 
10 \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 190b123..d4a1609 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,16 +1,18 @@ services: buildbot-master: image: 'buildbot/buildbot-master:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + hostname: buildbot-master restart: unless-stopped + ports: + - 8010:8010 environment: - - 'HOSTNAME=${MASTER_HOSTNAME:-buildbot-master-1}' - 'BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config}' - - 'BUILDBOT_CONFIG_URL=${BUILDBOT_CONFIG_URL:-https://git.braak.pro/api/packages/bartvdbraak/generic/builder.braak.pro/main/config.tar.gz}' + # - 'BUILDBOT_CONFIG_URL=${BUILDBOT_CONFIG_URL:-https://git.braak.pro/api/packages/bartvdbraak/generic/builder.braak.pro/main/config.tar.gz}' - 'BUILDBOT_WORKER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - 'BUILDBOT_WEB_URL=${BUILDBOT_WEB_URL:-http://localhost:8010/}' - 'BUILDBOT_WEB_PORT=${BUILDBOT_WEB_PORT:-tcp:port=8010}' - - 'POSTGRES_PASSWORD=${SERVICE_PASSWORD_POSTGRESQL}' - - 'POSTGRES_USER=${SERVICE_USER_POSTGRESQL}' + - 'POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-changeme123}' + - 'POSTGRES_USER=${POSTGRES_USER:-buildbot}' - 'POSTGRES_DB=${POSTGRES_DB:-buildbot}' - 'BUILDBOT_DB_URL=postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@postgresql/{POSTGRES_DB}' healthcheck: @@ -25,6 +27,11 @@ services: depends_on: postgresql: condition: service_healthy + volumes: + - ./buildbot/config:/buildbot/config + - ./buildbot/master.cfg:/buildbot/master.cfg + networks: + buildbot: null postgresql: image: 'postgres:${POSTGRES_IMAGE_TAG:-16-alpine}' restart: unless-stopped @@ -38,8 +45,32 @@ services: volumes: - 'buildbot-db:/var/lib/postgresql/data' environment: - - 'POSTGRES_PASSWORD=${SERVICE_PASSWORD_POSTGRESQL}' - - 'POSTGRES_USER=${SERVICE_USER_POSTGRESQL}' + - 'POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-changeme123}' + - 'POSTGRES_USER=${POSTGRES_USER:-buildbot}' - 'POSTGRES_DB=${POSTGRES_DB:-buildbot}' + networks: + buildbot: null + buildbot-worker: + image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + restart: unless-stopped + environment: + - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' + - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' + - 'WORKERNAME=${WORKERNAME:-example-worker}' + - 'WORKERPASS=${WORKERPASS:-pass}' + - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' + healthcheck: + test: + - CMD + - curl + - '-f' + - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' + interval: 5s + timeout: 20s + retries: 10 + networks: + buildbot: null volumes: - buildbot-db: {} \ No newline at end of file + buildbot-db: {} +networks: + buildbot: {} \ No newline at end of file -- 2.45.2 From 0f45b3843276b9da993e95cb843db491e028ebbe Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Wed, 20 Nov 2024 18:20:13 +0100 Subject: [PATCH 09/13] Working setup using localhost --- .env.local | 5 ----- buildbot/config/conf/local/machines.py | 2 +- buildbot/config/setup.py | 16 ++++++++-------- docker-compose.yml | 7 ++++--- 4 files changed, 13 insertions(+), 17 deletions(-) delete mode 100644 .env.local diff --git a/.env.local b/.env.local deleted file mode 100644 index 8099ed8..0000000 --- a/.env.local +++ /dev/null @@ -1,5 +0,0 @@ -POSTGRES_PASSWORD=buildbot -POSTGRES_PASSWORD=changeme! 
-BUILDBOT_CONFIG_URL='' -GITEA_URL=https://projects.blender.org -BUILDBOT_WEB_URL=http://localhost:8010/ \ No newline at end of file diff --git a/buildbot/config/conf/local/machines.py b/buildbot/config/conf/local/machines.py index fb430e7..e2d9938 100644 --- a/buildbot/config/conf/local/machines.py +++ b/buildbot/config/conf/local/machines.py @@ -27,5 +27,5 @@ def get_worker_password(worker_name: str) -> str: return "localhost" -def get_worker_names(ENVIRONMENT: str): +def get_worker_names(environment: str): return _worker_names diff --git a/buildbot/config/setup.py b/buildbot/config/setup.py index abf0730..f1ee021 100644 --- a/buildbot/config/setup.py +++ b/buildbot/config/setup.py @@ -31,7 +31,7 @@ importlib.reload(conf.worker) importlib.reload(gitea.blender) importlib.reload(pipeline) -ENVIRONMENT = os.environ.get("ENVIRONMENT", default="LOCAL") +environment = os.environ.get("BUILDBOT_environment", default="LOCAL") def setup() -> Dict[str, Any]: @@ -43,7 +43,7 @@ def setup() -> Dict[str, Any]: # Workers print("*** Creating platform workers") - platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) + platform_worker_names = conf.machines.fetch_platform_worker_names(environment) workers: List[buildbot.plugins.worker.Worker] = [] configured_worker_names = set() for worker_names in platform_worker_names.values(): @@ -55,7 +55,7 @@ def setup() -> Dict[str, Any]: workers += [ buildbot.plugins.worker.Worker( worker_name, - conf.machines.get_worker_password(ENVIRONMENT, worker_name), + conf.machines.get_worker_password(environment, worker_name), max_builds=1, keepalive_interval=3600, ) @@ -69,7 +69,7 @@ def setup() -> Dict[str, Any]: c["workers"] = workers # Builders and Schedulers - builders, schedulers = pipeline.populate(ENVIRONMENT) + builders, schedulers = pipeline.populate(environment) c["builders"] = builders c["schedulers"] = schedulers @@ -79,7 +79,7 @@ def setup() -> Dict[str, Any]: # status of each build will be pushed to these targets. buildbot/reporters/*.py # has a variety to choose from, like IRC bots. - gitea_status_service = gitea.blender.setup_service(ENVIRONMENT) + gitea_status_service = gitea.blender.setup_service(environment) if gitea_status_service: c["services"] = [gitea_status_service] else: @@ -90,7 +90,7 @@ def setup() -> Dict[str, Any]: # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). 
- c["title"] = f"Blender Buildbot - {ENVIRONMENT}" + c["title"] = f"Blender Buildbot - {environment}" c["titleURL"] = "https://projects.blender.org" # the 'buildbotURL' string should point to the location where the buildbot's @@ -113,10 +113,10 @@ def setup() -> Dict[str, Any]: c["buildbotNetUsageData"] = None # Authentication - c["www"]["auth"] = conf.auth.fetch_authentication(ENVIRONMENT) + c["www"]["auth"] = conf.auth.fetch_authentication(environment) # Authorization - c["www"]["authz"] = conf.auth.fetch_authorization(ENVIRONMENT) + c["www"]["authz"] = conf.auth.fetch_authorization(environment) # Disable UI - does not work c["www"]["plugins"] = { diff --git a/docker-compose.yml b/docker-compose.yml index d4a1609..19a7868 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: - 8010:8010 environment: - 'BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config}' - # - 'BUILDBOT_CONFIG_URL=${BUILDBOT_CONFIG_URL:-https://git.braak.pro/api/packages/bartvdbraak/generic/builder.braak.pro/main/config.tar.gz}' + - 'BUILDBOT_ENVIRONMENT=${BUILDBOT_ENVIRONMENT:-LOCAL}' - 'BUILDBOT_WORKER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - 'BUILDBOT_WEB_URL=${BUILDBOT_WEB_URL:-http://localhost:8010/}' - 'BUILDBOT_WEB_PORT=${BUILDBOT_WEB_PORT:-tcp:port=8010}' @@ -56,8 +56,9 @@ services: environment: - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' - - 'WORKERNAME=${WORKERNAME:-example-worker}' - - 'WORKERPASS=${WORKERPASS:-pass}' + - 'BUILDBOT_ENVIRONMENT=${BUILDBOT_ENVIRONMENT:-LOCAL}' + - 'WORKERNAME=${WORKERNAME:-localhost}' + - 'WORKERPASS=${WORKERPASS:-localhost}' - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' healthcheck: test: -- 2.45.2 From edb56e96dc3a88255950f030c013ee8bcaf9ebf3 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Wed, 20 Nov 2024 23:59:35 +0100 Subject: [PATCH 10/13] Implement authentication via Gitea --- .gitignore | 3 +- Makefile | 5 ++- buildbot/config/conf/auth.py | 14 ++++----- buildbot/config/conf/local/auth.py | 35 +++++++++++++++++---- buildbot/config/conf/machines.py | 14 ++++----- buildbot/config/conf/worker.py | 4 +-- buildbot/config/gitea/blender.py | 6 ++-- buildbot/config/pipeline/__init__.py | 4 +-- buildbot/config/pipeline/code.py | 22 ++++++------- buildbot/config/pipeline/code_benchmark.py | 10 +++--- buildbot/config/pipeline/code_bpy_deploy.py | 4 +-- buildbot/config/pipeline/code_deploy.py | 4 +-- buildbot/config/pipeline/code_store.py | 12 +++---- buildbot/config/pipeline/common.py | 22 ++++++------- buildbot/config/pipeline/doc_api.py | 4 +-- buildbot/config/pipeline/doc_developer.py | 4 +-- buildbot/config/pipeline/doc_manual.py | 4 +-- buildbot/config/pipeline/doc_studio.py | 4 +-- buildbot/config/setup.py | 32 +++++++++++++++++-- buildbot/config/worker/blender/__init__.py | 2 +- buildbot/config/worker/utils.py | 4 +-- docker-compose.yml | 3 +- 22 files changed, 134 insertions(+), 82 deletions(-) diff --git a/.gitignore b/.gitignore index 8a2abc6..ff83cdb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ # Hidden files .venv -.env.production -.env.staging +.env* # Python __pycache__ diff --git a/Makefile b/Makefile index 4d8df3d..3a23fb8 100644 --- a/Makefile +++ b/Makefile @@ -19,4 +19,7 @@ check: ## Check linting, formatting and types format: ## Autofix linting and formatting issues ruff check --fix - ruff format \ No newline at end of file + ruff format + +docker: ## Spin up and force 
recreation of Docker containers + docker compose up --force-recreate --detach \ No newline at end of file diff --git a/buildbot/config/conf/auth.py b/buildbot/config/conf/auth.py index 271d8e7..30cb268 100644 --- a/buildbot/config/conf/auth.py +++ b/buildbot/config/conf/auth.py @@ -7,8 +7,8 @@ import importlib import buildbot.plugins -def _get_auth_config(ENVIRONMENT: str): - if ENVIRONMENT == "LOCAL": +def _get_auth_config(environment: str): + if environment == "LOCAL": import conf.local.auth importlib.reload(conf.local.auth) @@ -20,13 +20,13 @@ def _get_auth_config(ENVIRONMENT: str): return conf.production.auth -def fetch_authentication(ENVIRONMENT: str): - auth_config = _get_auth_config(ENVIRONMENT) - return auth_config.get_authentication(ENVIRONMENT) +def fetch_authentication(environment: str): + auth_config = _get_auth_config(environment) + return auth_config.get_authentication(environment) -def fetch_authorization(ENVIRONMENT: str): - auth_config = _get_auth_config(ENVIRONMENT) +def fetch_authorization(environment: str): + auth_config = _get_auth_config(environment) admin_usernames = auth_config.admin_usernames deploy_dev_usernames = auth_config.deploy_dev_usernames diff --git a/buildbot/config/conf/local/auth.py b/buildbot/config/conf/local/auth.py index 0c1eb51..30eb7c2 100644 --- a/buildbot/config/conf/local/auth.py +++ b/buildbot/config/conf/local/auth.py @@ -2,7 +2,10 @@ # SPDX-FileCopyrightText: 2011-2024 Blender Authors # -import buildbot.plugins +# import buildbot.plugins +import os +from buildbot.www.oauth2 import OAuth2Auth +from urllib.parse import urljoin # Buildbot admin with access to everything. admin_usernames = [ @@ -19,10 +22,30 @@ trusted_dev_usernames = [ "admin", ] +gitea_endpoint = os.environ.get("GITEA_ENDPOINT", default="") +gitea_client_id = os.environ.get("GITEA_CLIENT_ID", default="") +gitea_client_secret = os.environ.get("GITEA_CLIENT_SECRET", default="") -def get_authentication(ENVIRONMENT: str): - class LocalEnvAuth(buildbot.plugins.util.CustomAuth): - def check_credentials(self, user, password): - return user.decode() == "admin" and password.decode() == "admin" - return LocalEnvAuth() +def get_authentication(environment: str): + class GiteaAuth(OAuth2Auth): + name = "projects.blender.org" + faIcon = "fa-cogs" + + AUTH_URL = "login/oauth/authorize" + TOKEN_URL = "login/oauth/access_token" + + def __init__(self, endpoint, client_id, client_secret, **kwargs): + super(GiteaAuth, self).__init__(client_id, client_secret, **kwargs) + self.resourceEndpoint = endpoint + self.authUri = urljoin(endpoint, self.AUTH_URL) + self.tokenUri = urljoin(endpoint, self.TOKEN_URL) + + def getUserInfoFromOAuthClient(self, c): + return self.get(c, "/api/v1/user") + + # class LocalEnvAuth(buildbot.plugins.util.CustomAuth): + # def check_credentials(self, user, password): + # return user.decode() == "admin" and password.decode() == "admin" + + return GiteaAuth(gitea_endpoint, gitea_client_id, gitea_client_secret) diff --git a/buildbot/config/conf/machines.py b/buildbot/config/conf/machines.py index 4c36e83..833f7ed 100644 --- a/buildbot/config/conf/machines.py +++ b/buildbot/config/conf/machines.py @@ -5,8 +5,8 @@ import importlib -def _get_config(ENVIRONMENT: str): - if ENVIRONMENT == "LOCAL": +def _get_config(environment: str): + if environment == "LOCAL": import conf.local.machines importlib.reload(conf.local.machines) @@ -18,13 +18,13 @@ def _get_config(ENVIRONMENT: str): return conf.production.machines -def fetch_platform_worker_names(ENVIRONMENT: str): - machines_config = 
_get_config(ENVIRONMENT) - return machines_config.get_worker_names(ENVIRONMENT) +def fetch_platform_worker_names(environment: str): + machines_config = _get_config(environment) + return machines_config.get_worker_names(environment) -def get_worker_password(ENVIRONMENT: str, worker_name: str) -> str: - machines_config = _get_config(ENVIRONMENT) +def get_worker_password(environment: str, worker_name: str) -> str: + machines_config = _get_config(environment) return machines_config.get_worker_password(worker_name) diff --git a/buildbot/config/conf/worker.py b/buildbot/config/conf/worker.py index 60adb0c..1c019a3 100644 --- a/buildbot/config/conf/worker.py +++ b/buildbot/config/conf/worker.py @@ -7,8 +7,8 @@ import importlib from typing import Any -def get_config(ENVIRONMENT: str) -> Any: - if ENVIRONMENT == "LOCAL": +def get_config(environment: str) -> Any: + if environment == "LOCAL": import conf.local.worker importlib.reload(conf.local.worker) diff --git a/buildbot/config/gitea/blender.py b/buildbot/config/gitea/blender.py index 4d0f1c4..e9720f3 100644 --- a/buildbot/config/gitea/blender.py +++ b/buildbot/config/gitea/blender.py @@ -22,12 +22,12 @@ gitea_api_token = None gitea_status_service = None -def setup_service(ENVIRONMENT: str): +def setup_service(environment: str): import conf.worker importlib.reload(conf.worker) - worker_config = conf.worker.get_config(ENVIRONMENT) - gitea_api_token = worker_config.gitea_api_token(ENVIRONMENT) + worker_config = conf.worker.get_config(environment) + gitea_api_token = worker_config.gitea_api_token(environment) if gitea_api_token: log.msg("Found Gitea API token, enabling status push") diff --git a/buildbot/config/pipeline/__init__.py b/buildbot/config/pipeline/__init__.py index e262cf0..3b9f313 100644 --- a/buildbot/config/pipeline/__init__.py +++ b/buildbot/config/pipeline/__init__.py @@ -23,7 +23,7 @@ importlib.reload(pipeline.common) importlib.reload(conf.branches) -def populate(ENVIRONMENT): +def populate(environment): pipelines_modules = [ pipeline.code, pipeline.code_benchmark, @@ -41,7 +41,7 @@ def populate(ENVIRONMENT): for pipelines_module in pipelines_modules: importlib.reload(pipelines_module) - b, s = pipelines_module.populate(ENVIRONMENT) + b, s = pipelines_module.populate(environment) builders += b schedulers += s diff --git a/buildbot/config/pipeline/code.py b/buildbot/config/pipeline/code.py index 68e33ea..f92076f 100644 --- a/buildbot/config/pipeline/code.py +++ b/buildbot/config/pipeline/code.py @@ -233,7 +233,7 @@ scheduler_properties = { @buildbot.plugins.util.renderer def create_code_worker_command_args( - props, ENVIRONMENT, track_id, pipeline_type, step_name + props, environment, track_id, pipeline_type, step_name ): commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD") patch_id = pipeline.common.fetch_property(props, key="patch_id", default="") @@ -294,7 +294,7 @@ def create_code_worker_command_args( args += [step_name] - return pipeline.common.create_worker_command("code.py", ENVIRONMENT, track_id, args) + return pipeline.common.create_worker_command("code.py", environment, track_id, args) def needs_do_code_pipeline_step(step): @@ -449,17 +449,17 @@ class PlatformTrigger(plugins_steps.Trigger): return schedulers -def populate(ENVIRONMENT): +def populate(environment): builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) + platform_worker_names = conf.machines.fetch_platform_worker_names(environment) local_worker_names = 
conf.machines.fetch_local_worker_names() - worker_config = conf.worker.get_config(ENVIRONMENT) + worker_config = conf.worker.get_config(environment) - needs_incremental_schedulers = ENVIRONMENT in ["PROD"] - needs_nightly_schedulers = ENVIRONMENT in ["PROD"] + needs_incremental_schedulers = environment in ["PROD"] + needs_nightly_schedulers = environment in ["PROD"] print("*** Creating [code] pipeline") for track_id in code_track_ids: @@ -491,7 +491,7 @@ def populate(ENVIRONMENT): step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds step_command = create_code_worker_command_args.withArgs( - ENVIRONMENT, track_id, pipeline_type, step_name + environment, track_id, pipeline_type, step_name ) step = buildbot.plugins.steps.ShellCommand( @@ -510,7 +510,7 @@ def populate(ENVIRONMENT): for master_step_name in pipeline.common.code_pipeline_master_step_names: master_step_command = ( pipeline.common.create_master_command_args.withArgs( - ENVIRONMENT, + environment, track_id, pipeline_type, master_step_name, @@ -534,7 +534,7 @@ def populate(ENVIRONMENT): pipeline_lint_factory = buildbot.plugins.util.BuildFactory() for step_name in code_pipeline_lint_step_names: step_command = create_code_worker_command_args.withArgs( - ENVIRONMENT, track_id, pipeline_type, step_name + environment, track_id, pipeline_type, step_name ) pipeline_lint_factory.addStep( @@ -574,7 +574,7 @@ def populate(ENVIRONMENT): suitable_pipeline_worker_names = pipeline_worker_names if ( platform_architecture == "linux-x86_64" - and ENVIRONMENT != "LOCAL" + and environment != "LOCAL" ): selector = "rocky" suitable_pipeline_worker_names = [ diff --git a/buildbot/config/pipeline/code_benchmark.py b/buildbot/config/pipeline/code_benchmark.py index 55aa315..6b1b6f0 100644 --- a/buildbot/config/pipeline/code_benchmark.py +++ b/buildbot/config/pipeline/code_benchmark.py @@ -26,8 +26,8 @@ class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): return -def create_deliver_step(ENVIRONMENT): - worker_config = conf.worker.get_config(ENVIRONMENT) +def create_deliver_step(environment): + worker_config = conf.worker.get_config(environment) file_size_in_mb = 500 * 1024 * 1024 worker_source_path = pathlib.Path("../../../../git/blender-vdev/build_package") @@ -48,7 +48,7 @@ def create_deliver_step(ENVIRONMENT): ) -def populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.StringParameter( name="commit_id", @@ -68,7 +68,7 @@ def populate(ENVIRONMENT): ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "code-benchmark", "code_benchmark.py", [ @@ -78,7 +78,7 @@ def populate(ENVIRONMENT): "compile-gpu", "compile-install", "benchmark", - partial(create_deliver_step, ENVIRONMENT), + partial(create_deliver_step, environment), "clean", ], {"vdev": "main"}, diff --git a/buildbot/config/pipeline/code_bpy_deploy.py b/buildbot/config/pipeline/code_bpy_deploy.py index b5e94d0..3b6b6c3 100644 --- a/buildbot/config/pipeline/code_bpy_deploy.py +++ b/buildbot/config/pipeline/code_bpy_deploy.py @@ -9,11 +9,11 @@ import conf.branches import pipeline.common -def populate(ENVIRONMENT): +def populate(environment): properties = [] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "code-bpy-deploy", "code_bpy_deploy.py", [ diff --git a/buildbot/config/pipeline/code_deploy.py b/buildbot/config/pipeline/code_deploy.py index d4c9d8f..158248e 100644 --- a/buildbot/config/pipeline/code_deploy.py +++ b/buildbot/config/pipeline/code_deploy.py @@ -10,7 +10,7 @@ import conf.branches import 
pipeline.common -def populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_full_clean", @@ -22,7 +22,7 @@ ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "code-artifacts-deploy", "code_deploy.py", [ diff --git a/buildbot/config/pipeline/code_store.py b/buildbot/config/pipeline/code_store.py index c4d2f15..5e0a509 100644 --- a/buildbot/config/pipeline/code_store.py +++ b/buildbot/config/pipeline/code_store.py @@ -62,16 +62,16 @@ def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type) ) -def populate(ENVIRONMENT): +def populate(environment): builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) + platform_worker_names = conf.machines.fetch_platform_worker_names(environment) local_worker_names = conf.machines.fetch_local_worker_names() - worker_config = conf.worker.get_config(ENVIRONMENT) + worker_config = conf.worker.get_config(environment) - needs_nightly_schedulers = ENVIRONMENT == "PROD" + needs_nightly_schedulers = environment == "PROD" pipeline_type = "daily" @@ -108,7 +108,7 @@ def populate(ENVIRONMENT): else: args = ["--store-id", store_id, step_name] step_command = pipeline.common.create_worker_command( - "code_store.py", ENVIRONMENT, track_id, args + "code_store.py", environment, track_id, args ) step = plugins_steps.ShellCommand( @@ -126,7 +126,7 @@ def populate(ENVIRONMENT): for master_step_name in pipeline.common.code_pipeline_master_step_names: master_step_command = ( pipeline.common.create_master_command_args.withArgs( - ENVIRONMENT, + environment, track_id, pipeline_type, master_step_name, diff --git a/buildbot/config/pipeline/common.py b/buildbot/config/pipeline/common.py index 98b92bf..ee12761 100644 --- a/buildbot/config/pipeline/common.py +++ b/buildbot/config/pipeline/common.py @@ -57,7 +57,7 @@ def needs_do_doc_pipeline_step(step): return True -def create_worker_command(script, ENVIRONMENT, track_id, args): +def create_worker_command(script, environment, track_id, args): # This relative path assumes we are in: # ~/.devops/services/buildbot-worker//build # There appears to be no way to expand a tilde here?
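# (Buildbot spawns step commands directly, without a shell, so a literal "~" would never be expanded on the worker; calling os.path.expanduser() here would run on the master and expand the master's home directory instead, hence the relative path.)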
@@ -71,7 +71,7 @@ def create_worker_command(script, ENVIRONMENT, track_id, args): "--track-id", track_id, "--service-env-id", - ENVIRONMENT, + environment, ] return cmd + list(args) @@ -79,7 +79,7 @@ def create_worker_command(script, ENVIRONMENT, track_id, args): @buildbot.plugins.util.renderer def create_master_command_args( - props, ENVIRONMENT, track_id, pipeline_type, step_name, single_platform + props, environment, track_id, pipeline_type, step_name, single_platform ): build_configuration = fetch_property( props, key="build_configuration", default="release" @@ -116,7 +116,7 @@ def create_master_command_args( "--track-id", track_id, "--service-env-id", - ENVIRONMENT, + environment, ] return cmd + list(args) @@ -125,7 +125,7 @@ def create_master_command_args( @buildbot.plugins.util.renderer def create_pipeline_worker_command( props, - ENVIRONMENT, + environment, track_id, script, step_name, @@ -154,11 +154,11 @@ def create_pipeline_worker_command( if "revision" in props and props["revision"]: args += ["--commit-id", props["revision"]] - return create_worker_command(script, ENVIRONMENT, track_id, args) + return create_worker_command(script, environment, track_id, args) def create_pipeline( - ENVIRONMENT, + environment, artifact_id, script, steps, @@ -179,13 +179,13 @@ def create_pipeline( builders = [] schedulers = [] - platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) + platform_worker_names = conf.machines.fetch_platform_worker_names(environment) local_worker_names = conf.machines.fetch_local_worker_names() needs_incremental_schedulers = ( - incremental_properties is not None and ENVIRONMENT in ["PROD"] + incremental_properties is not None and environment in ["PROD"] ) - needs_nightly_schedulers = nightly_properties is not None and ENVIRONMENT in [ + needs_nightly_schedulers = nightly_properties is not None and environment in [ "PROD" ] track_ids = tracked_branch_ids.keys() @@ -210,7 +210,7 @@ def create_pipeline( continue step_command = create_pipeline_worker_command.withArgs( - ENVIRONMENT, + environment, track_id, script, step, diff --git a/buildbot/config/pipeline/doc_api.py b/buildbot/config/pipeline/doc_api.py index 0ff2b07..39652c9 100644 --- a/buildbot/config/pipeline/doc_api.py +++ b/buildbot/config/pipeline/doc_api.py @@ -8,7 +8,7 @@ import conf.branches import pipeline.common -def populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_full_clean", @@ -27,7 +27,7 @@ def populate(ENVIRONMENT): ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "doc-api", "doc_api.py", [ diff --git a/buildbot/config/pipeline/doc_developer.py b/buildbot/config/pipeline/doc_developer.py index a7f03b2..bd35f36 100644 --- a/buildbot/config/pipeline/doc_developer.py +++ b/buildbot/config/pipeline/doc_developer.py @@ -7,7 +7,7 @@ import buildbot.plugins import pipeline.common -def populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -19,7 +19,7 @@ def populate(ENVIRONMENT): ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "doc-developer", "doc_developer.py", ["update", "compile", "deliver"], diff --git a/buildbot/config/pipeline/doc_manual.py b/buildbot/config/pipeline/doc_manual.py index 4743689..69545b7 100644 --- a/buildbot/config/pipeline/doc_manual.py +++ b/buildbot/config/pipeline/doc_manual.py @@ -8,7 +8,7 @@ import conf.branches import pipeline.common -def 
populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -27,7 +27,7 @@ def populate(ENVIRONMENT): ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "doc-manual", "doc_manual.py", ["configure-machine", "update", "compile", "package", "deliver", "clean"], diff --git a/buildbot/config/pipeline/doc_studio.py b/buildbot/config/pipeline/doc_studio.py index cc90408..0e6c90a 100644 --- a/buildbot/config/pipeline/doc_studio.py +++ b/buildbot/config/pipeline/doc_studio.py @@ -7,7 +7,7 @@ import buildbot.plugins import pipeline.common -def populate(ENVIRONMENT): +def populate(environment): properties = [ buildbot.plugins.util.BooleanParameter( name="needs_package_delivery", @@ -19,7 +19,7 @@ def populate(ENVIRONMENT): ] return pipeline.common.create_pipeline( - ENVIRONMENT, + environment, "doc-studio-tools", "doc_studio.py", ["update", "compile", "deliver"], diff --git a/buildbot/config/setup.py b/buildbot/config/setup.py index f1ee021..7d73acf 100644 --- a/buildbot/config/setup.py +++ b/buildbot/config/setup.py @@ -31,7 +31,7 @@ importlib.reload(conf.worker) importlib.reload(gitea.blender) importlib.reload(pipeline) -environment = os.environ.get("BUILDBOT_environment", default="LOCAL") +environment = os.environ.get("BUILDBOT_ENVIRONMENT", default="LOCAL") def setup() -> Dict[str, Any]: @@ -90,7 +90,7 @@ def setup() -> Dict[str, Any]: # the 'title' string will appear at the top of this buildbot installation's # home pages (linked to the 'titleURL'). - c["title"] = f"Blender Buildbot - {environment}" + c["title"] = "Builder" c["titleURL"] = "https://projects.blender.org" # the 'buildbotURL' string should point to the location where the buildbot's @@ -103,6 +103,20 @@ def setup() -> Dict[str, Any]: c["www"] = dict( port=os.environ.get("BUILDBOT_WEB_PORT", 8010), plugins=dict(waterfall_view={}, console_view={}, grid_view={}), + theme={ + "bb-sidebar-background-color": "#1F2226", # Eerie Black 2 + "bb-sidebar-header-background-color": "#202327", # Eerie Black + "bb-sidebar-header-text-color": "#9fa3a8", # Dim Gray (Lighter gray for text) + "bb-sidebar-title-text-color": "#9fa3a8", # Dim Gray (Titles) + "bb-sidebar-footer-background-color": "#292d32", # Jet + "bb-sidebar-button-text-color": "#9fa3a8", # Dim Gray (Button text) + "bb-sidebar-button-hover-background-color": "#292d32", # Jet (Button hover background) + "bb-sidebar-button-hover-text-color": "#3dabf5", # Light blue for hover text + "bb-sidebar-button-current-background-color": "#292d32", # Jet (Current button background) + "bb-sidebar-button-current-text-color": "#3dabf5", # Light blue for current button text + "bb-sidebar-stripe-hover-color": "#3695D5", # Celestial Blue + "bb-sidebar-stripe-current-color": "#084F7E", # Indigo Dye + }, ) # Database @@ -116,7 +130,19 @@ def setup() -> Dict[str, Any]: c["www"]["auth"] = conf.auth.fetch_authentication(environment) # Authorization - c["www"]["authz"] = conf.auth.fetch_authorization(environment) + # c["www"]["authz"] = conf.auth.fetch_authorization(environment) + c["www"]["authz"] = buildbot.plugins.util.Authz( + allowRules=[ + buildbot.plugins.util.AnyControlEndpointMatcher( + role="Admins" + ), # Organization teams + ], + roleMatchers=[ + buildbot.plugins.util.RolesFromGroups( + groupPrefix="test-org/" + ) # Gitea organization + ], + ) # Disable UI - does not work c["www"]["plugins"] = { diff --git a/buildbot/config/worker/blender/__init__.py b/buildbot/config/worker/blender/__init__.py index 
c2fadff..7ea53d2 100644 --- a/buildbot/config/worker/blender/__init__.py +++ b/buildbot/config/worker/blender/__init__.py @@ -47,7 +47,7 @@ class CodeBuilder(worker.utils.Builder): # Call command within the compiler environment. def call( - self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None + self, cmd: worker.utils.CmdSequence, env: worker.utils.Cmdenvironment = None ) -> int: cmd_prefix: worker.utils.CmdList = [] diff --git a/buildbot/config/worker/utils.py b/buildbot/config/worker/utils.py index 74abf32..7460d27 100644 --- a/buildbot/config/worker/utils.py +++ b/buildbot/config/worker/utils.py @@ -115,7 +115,7 @@ CmdArgument = Union[str, pathlib.Path, HiddenArgument, Any] CmdList = List[CmdArgument] CmdSequence = Sequence[CmdArgument] CmdFilterOutput = Optional[Callable[[str], Optional[str]]] -CmdEnvironment = Optional[Dict[str, str]] +Cmdenvironment = Optional[Dict[str, str]] def _prepare_call( @@ -142,7 +142,7 @@ def call( cmd: CmdSequence, - env: CmdEnvironment = None, + env: Cmdenvironment = None, exit_on_error: bool = True, filter_output: CmdFilterOutput = None, retry_count: int = 0, diff --git a/docker-compose.yml b/docker-compose.yml index 19a7868..b2dfb0b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,7 @@ services: buildbot-master: image: 'buildbot/buildbot-master:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + env_file: .env hostname: buildbot-master restart: unless-stopped ports: -- 2.45.2 From b91ee462b8904ab632614a986fbe14615873b280 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Thu, 21 Nov 2024 09:02:00 +0100 Subject: [PATCH 11/13] Working authentication with orgs and group retrieval --- buildbot/config/conf/auth.py | 13 ++++++++++ buildbot/config/conf/local/auth.py | 38 +++++++++++++++++++++++------- buildbot/config/setup.py | 14 +---------- 3 files changed, 44 insertions(+), 21 deletions(-) diff --git a/buildbot/config/conf/auth.py b/buildbot/config/conf/auth.py index 30cb268..3026b4c 100644 --- a/buildbot/config/conf/auth.py +++ b/buildbot/config/conf/auth.py @@ -119,4 +119,17 @@ def fetch_authorization(environment: str): roleMatchers=file_based_group_username_role_matchers, ) + # my_authz = buildbot.plugins.util.Authz( + # allowRules=[ + # buildbot.plugins.util.AnyControlEndpointMatcher( + # role="Developers" + # ), # Organization teams + # ], + # roleMatchers=[ + # buildbot.plugins.util.RolesFromGroups( + # groupPrefix="test-org/" + # ) # Gitea organization + # ], + # ) + return my_authz diff --git a/buildbot/config/conf/local/auth.py b/buildbot/config/conf/local/auth.py index 30eb7c2..33e611b 100644 --- a/buildbot/config/conf/local/auth.py +++ b/buildbot/config/conf/local/auth.py @@ -2,9 +2,9 @@ # SPDX-FileCopyrightText: 2011-2024 Blender Authors # -# import buildbot.plugins import os from buildbot.www.oauth2 import OAuth2Auth +import buildbot.plugins from urllib.parse import urljoin # Buildbot admin with access to everything.
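# (These module-level username lists are read by conf.auth.fetch_authorization(), which builds its role matchers from them; the Gitea OAuth changes below add the user's organizations and org/team memberships as an additional source of groups for role matching.)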
@@ -29,8 +29,8 @@ gitea_client_secret = os.environ.get("GITEA_CLIENT_SECRET", default="") def get_authentication(environment: str): class GiteaAuth(OAuth2Auth): - name = "projects.blender.org" - faIcon = "fa-cogs" + name = "Gitea" + faIcon = "fa-gitea" AUTH_URL = "login/oauth/authorize" TOKEN_URL = "login/oauth/access_token" @@ -42,10 +42,32 @@ def get_authentication(environment: str): self.tokenUri = urljoin(endpoint, self.TOKEN_URL) def getUserInfoFromOAuthClient(self, c): - return self.get(c, "/api/v1/user") + user_info = self.get(c, "/api/v1/user") - # class LocalEnvAuth(buildbot.plugins.util.CustomAuth): - # def check_credentials(self, user, password): - # return user.decode() == "admin" and password.decode() == "admin" + orgs = self.get(c, "/api/v1/user/orgs") + org_groups = [org["username"] for org in orgs] - return GiteaAuth(gitea_endpoint, gitea_client_id, gitea_client_secret) + teams = self.get(c, "/api/v1/user/teams") + team_groups = [ + f"{team['organization']['username']}/{team['name']}" for team in teams + ] # Format: org/team + + groups = org_groups + team_groups + + user_data = { + "full_name": user_info.get("full_name", user_info.get("username")), + "email": user_info.get("email"), + "username": user_info.get("username"), + "groups": groups, + } + + return user_data + + class LocalEnvAuth(buildbot.plugins.util.CustomAuth): + def check_credentials(self, user, password): + return user.decode() == "admin" and password.decode() == "admin" + + if gitea_endpoint and gitea_client_id and gitea_client_secret: + return GiteaAuth(gitea_endpoint, gitea_client_id, gitea_client_secret) + else: + return LocalEnvAuth() diff --git a/buildbot/config/setup.py b/buildbot/config/setup.py index 7d73acf..2aefd08 100644 --- a/buildbot/config/setup.py +++ b/buildbot/config/setup.py @@ -130,19 +130,7 @@ def setup() -> Dict[str, Any]: c["www"]["auth"] = conf.auth.fetch_authentication(environment) # Authorization - # c["www"]["authz"] = conf.auth.fetch_authorization(environment) - c["www"]["authz"] = buildbot.plugins.util.Authz( - allowRules=[ - buildbot.plugins.util.AnyControlEndpointMatcher( - role="Admins" - ), # Organization teams - ], - roleMatchers=[ - buildbot.plugins.util.RolesFromGroups( - groupPrefix="test-org/" - ) # Gitea organization - ], - ) + c["www"]["authz"] = conf.auth.fetch_authorization(environment) # Disable UI - does not work c["www"]["plugins"] = { -- 2.45.2 From 2eb472cc20ad276cf0d01202bd372f16b2c74d23 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 26 Nov 2024 12:33:25 +0100 Subject: [PATCH 12/13] Team-based authorization --- buildbot/config/conf/auth.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/buildbot/config/conf/auth.py b/buildbot/config/conf/auth.py index 3026b4c..1397212 100644 --- a/buildbot/config/conf/auth.py +++ b/buildbot/config/conf/auth.py @@ -119,17 +119,17 @@ def fetch_authorization(environment: str): roleMatchers=file_based_group_username_role_matchers, ) - # my_authz = buildbot.plugins.util.Authz( - # allowRules=[ - # buildbot.plugins.util.AnyControlEndpointMatcher( - # role="Developers" - # ), # Organization teams - # ], - # roleMatchers=[ - # buildbot.plugins.util.RolesFromGroups( - # groupPrefix="test-org/" - # ) # Gitea organization - # ], - # ) + my_authz = buildbot.plugins.util.Authz( + allowRules=[ + buildbot.plugins.util.AnyControlEndpointMatcher( + role="Developers" + ), # Organization teams + ], + roleMatchers=[ + buildbot.plugins.util.RolesFromGroups( + groupPrefix="test-org/" + ) # 
Gitea organization + ], + ) return my_authz -- 2.45.2 From 043a1b6ffa9779b577e220cfe22aa92d8c146ac0 Mon Sep 17 00:00:00 2001 From: Bart van der Braak Date: Tue, 26 Nov 2024 17:16:21 +0100 Subject: [PATCH 13/13] Add Prometheus metrics to Buildbot --- Dockerfile | 11 ++++++++ buildbot/config/setup.py | 59 +++++++++++++++++++++++----------------- docker-compose.yml | 11 +++++++- prometheus.yml | 7 +++++ provision/local/main.tf | 49 +++++++++++++++++++++++++++++++++ 5 files changed, 111 insertions(+), 26 deletions(-) create mode 100644 Dockerfile create mode 100644 prometheus.yml create mode 100644 provision/local/main.tf diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..031e625 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,11 @@ +# Use the published buildbot/buildbot-master image as the base +FROM buildbot/buildbot-master:master + +# Install buildbot-prometheus in the existing virtual environment +RUN /buildbot_venv/bin/pip3 install buildbot-prometheus + +# Set the working directory to where the buildbot files are expected +WORKDIR /buildbot + +# Keep the existing command to start buildbot +CMD ["dumb-init", "/usr/src/buildbot/start_buildbot.sh"] diff --git a/buildbot/config/setup.py b/buildbot/config/setup.py index 2aefd08..fc15bd1 100644 --- a/buildbot/config/setup.py +++ b/buildbot/config/setup.py @@ -85,6 +85,12 @@ def setup() -> Dict[str, Any]: else: c["services"] = [] + c["services"].append( + buildbot.plugins.reporters.Prometheus( + port=int(os.environ.get("BUILDBOT_PROMETHEUS_PORT", default=9100)) + ) + ) + ####### PROJECT IDENTITY # the 'title' string will appear at the top of this buildbot installation's @@ -99,31 +105,41 @@ def setup() -> Dict[str, Any]: # buildbot cannot figure out without some help. c["buildbotURL"] = os.environ.get("BUILDBOT_WEB_URL", "http://localhost:8010/") - # Minimalistic config to activate new web UI - c["www"] = dict( - port=os.environ.get("BUILDBOT_WEB_PORT", 8010), - plugins=dict(waterfall_view={}, console_view={}, grid_view={}), - theme={ - "bb-sidebar-background-color": "#1F2226", # Eerie Black 2 - "bb-sidebar-header-background-color": "#202327", # Eerie Black - "bb-sidebar-header-text-color": "#9fa3a8", # Dim Gray (Lighter gray for text) - "bb-sidebar-title-text-color": "#9fa3a8", # Dim Gray (Titles) - "bb-sidebar-footer-background-color": "#292d32", # Jet - "bb-sidebar-button-text-color": "#9fa3a8", # Dim Gray (Button text) - "bb-sidebar-button-hover-background-color": "#292d32", # Jet (Button hover background) - "bb-sidebar-button-hover-text-color": "#3dabf5", # Light blue for hover text - "bb-sidebar-button-current-background-color": "#292d32", # Jet (Current button background) - "bb-sidebar-button-current-text-color": "#3dabf5", # Light blue for current button text - "bb-sidebar-stripe-hover-color": "#3695D5", # Celestial Blue - "bb-sidebar-stripe-current-color": "#084F7E", # Indigo Dye - }, + # Initialize + c["www"] = {} + + # Port + c["www"]["port"] = os.environ.get("BUILDBOT_WEB_PORT", 8010) + + # Plugins + c["www"]["plugins"] = dict( + waterfall_view={}, + console_view={}, + grid_view={}, ) + # Theme + c["www"]["theme"] = { + "bb-sidebar-background-color": "#1F2226", # Eerie Black 2 + "bb-sidebar-header-background-color": "#202327", # Eerie Black + "bb-sidebar-header-text-color": "#9fa3a8", # Dim Gray (Lighter gray for text) + "bb-sidebar-title-text-color": "#9fa3a8", # Dim Gray (Titles) + "bb-sidebar-footer-background-color": "#292d32", # Jet + "bb-sidebar-button-text-color": "#9fa3a8", # Dim Gray (Button text) + 
"bb-sidebar-button-hover-background-color": "#292d32", # Jet (Button hover background) + "bb-sidebar-button-hover-text-color": "#3dabf5", # Light blue for hover text + "bb-sidebar-button-current-background-color": "#292d32", # Jet (Current button background) + "bb-sidebar-button-current-text-color": "#3dabf5", # Light blue for current button text + "bb-sidebar-stripe-hover-color": "#3695D5", # Celestial Blue + "bb-sidebar-stripe-current-color": "#084F7E", # Indigo Dye + } + # Database c["db"] = { "db_url": os.environ.get("BUILDBOT_DB_URL", "sqlite://").format(**os.environ) } + # Share usage data c["buildbotNetUsageData"] = None # Authentication @@ -132,13 +148,6 @@ def setup() -> Dict[str, Any]: # Authorization c["www"]["authz"] = conf.auth.fetch_authorization(environment) - # Disable UI - does not work - c["www"]["plugins"] = { - "waterfall_view": False, - "console_view": False, - "grid_view": False, - } - # UI Defaults c["www"]["ui_default_config"] = { "Grid.fullChanges": True, diff --git a/docker-compose.yml b/docker-compose.yml index b2dfb0b..fa67f08 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,8 @@ services: buildbot-master: - image: 'buildbot/buildbot-master:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + # image: 'buildbot/buildbot-master:${BUILDBOT_IMAGE_TAG:-v4.1.0}' + build: + context: . env_file: .env hostname: buildbot-master restart: unless-stopped @@ -16,6 +18,7 @@ services: - 'POSTGRES_USER=${POSTGRES_USER:-buildbot}' - 'POSTGRES_DB=${POSTGRES_DB:-buildbot}' - 'BUILDBOT_DB_URL=postgresql+psycopg2://{POSTGRES_USER}:{POSTGRES_PASSWORD}@postgresql/{POSTGRES_DB}' + - 'BUILDBOT_PROMETHEUS_PORT=9100' healthcheck: test: - CMD @@ -72,6 +75,12 @@ services: retries: 10 networks: buildbot: null + prometheus: + image: prom/prometheus + volumes: + - './prometheus.yml:/etc/prometheus/prometheus.yml' + ports: + - '9090:9090' volumes: buildbot-db: {} networks: diff --git a/prometheus.yml b/prometheus.yml new file mode 100644 index 0000000..eba9cea --- /dev/null +++ b/prometheus.yml @@ -0,0 +1,7 @@ +global: + scrape_interval: 60s +scrape_configs: + - job_name: buildbot + static_configs: + - targets: + - buildbot-master:9100 \ No newline at end of file diff --git a/provision/local/main.tf b/provision/local/main.tf new file mode 100644 index 0000000..e9355dd --- /dev/null +++ b/provision/local/main.tf @@ -0,0 +1,49 @@ +terraform { + required_providers { + libvirt = { + source = "dmacvicar/libvirt" + } + } +} + +provider "libvirt" { + uri = "qemu:///system" +} + +resource "libvirt_volume" "win11-ltsc-cloudbase-init" { + name = "win11-ltsc-cloudbase-init.qcow2" + pool = "default" # List storage pools using virsh pool-list + source = "win11-ltsc-original.qcow2" + format = "qcow2" +} + +resource "libvirt_domain" "win11-ltsc-cloudbase-init" { + name = "win11-ltsc-cloudbase-init" + memory = "8192" + vcpu = 4 + + network_interface { + network_name = "default" # List networks with virsh net-list + } + + disk { + volume_id = "${libvirt_volume.win11-ltsc-cloudbase-init.id}" + } + + console { + type = "pty" + target_type = "serial" + target_port = "0" + } + + graphics { + type = "spice" + listen_type = "address" + autoport = true + } +} + +# Output Server IP +output "ip" { + value = "${libvirt_domain.centos7.network_interface.0.addresses.0}" +} \ No newline at end of file -- 2.45.2