Compare commits

3 commits: 18e653fd2e ... 77ae214d24

| Author | SHA1 | Date |
|---|---|---|
|  | 77ae214d24 |  |
|  | d6bce1b39d |  |
|  | 0a1454d250 |  |

67 changed files with 8367 additions and 53 deletions
@@ -1,3 +0,0 @@
SERVICE_USER_POSTGRESQL=buildbot
SERVICE_PASSWORD_POSTGRESQL=changeme!
BUILDBOT_CONFIG_URL=''

.env.local (new file, +4)
@@ -0,0 +1,4 @@
SERVICE_USER_POSTGRESQL=buildbot
SERVICE_PASSWORD_POSTGRESQL=changeme!
GITEA_URL=https://projects.blender.org
BUILDBOT_WEB_URL=http://localhost:8010/

@@ -1,4 +1,3 @@
name: Run checks
on:
  pull_request:
    branches:

.gitignore (vendored, 3 changes)
@@ -1,2 +1,3 @@
.venv
.env
.env.staging
.env.production

config/conf/__init__.py (new file, empty)

config/conf/auth.py (new file, +122)
@@ -0,0 +1,122 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import importlib

import buildbot.plugins


def _get_auth_config(ENVIRONMENT: str):
    if ENVIRONMENT == "LOCAL":
        import conf.local.auth

        importlib.reload(conf.local.auth)
        return conf.local.auth
    else:
        import conf.production.auth

        importlib.reload(conf.production.auth)
        return conf.production.auth


def fetch_authentication(ENVIRONMENT: str):
    auth_config = _get_auth_config(ENVIRONMENT)
    return auth_config.get_authentication(ENVIRONMENT)


def fetch_authorization(ENVIRONMENT: str):
    auth_config = _get_auth_config(ENVIRONMENT)

    admin_usernames = auth_config.admin_usernames
    deploy_dev_usernames = auth_config.deploy_dev_usernames
    trusted_dev_usernames = auth_config.trusted_dev_usernames

    dev_usernames = list(
        set(deploy_dev_usernames + trusted_dev_usernames + admin_usernames)
    )
    deploy_usernames = list(set(deploy_dev_usernames + admin_usernames))

    file_based_group_username_role_matchers = [
        buildbot.plugins.util.RolesFromUsername(
            roles=["admin"], usernames=admin_usernames
        ),
        buildbot.plugins.util.RolesFromUsername(
            roles=["deploy"], usernames=deploy_usernames
        ),
        buildbot.plugins.util.RolesFromUsername(roles=["dev"], usernames=dev_usernames),
    ]

    my_authz = buildbot.plugins.util.Authz(
        stringsMatcher=buildbot.plugins.util.fnmatchStrMatcher,
        allowRules=[
            # Admins can do anything,
            #
            # defaultDeny=False: if user does not have the admin role, we continue
            # parsing rules
            # buildbot.plugins.util.AnyEndpointMatcher(role='admin', defaultDeny=False),
            # buildbot.plugins.util.AnyEndpointMatcher(role='dev', defaultDeny=False),
            # buildbot.plugins.util.AnyEndpointMatcher(role='coordinator', defaultDeny=False),
            # buildbot.plugins.util.AnyEndpointMatcher(role='anonymous', defaultDeny=False),
            buildbot.plugins.util.StopBuildEndpointMatcher(
                role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.EnableSchedulerEndpointMatcher(
                role="admin", defaultDeny=True
            ),
            # buildbot.plugins.util.AnyEndpointMatcher(role='any', defaultDeny=False),
            # Force roles
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-code-experimental-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-code-patch-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-code-daily-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-store-*", role="deploy", defaultDeny=True
            ),
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-deploy-*", role="deploy", defaultDeny=True
            ),
            buildbot.plugins.util.ForceBuildEndpointMatcher(
                builder="*-doc-*", role="dev", defaultDeny=True
            ),
            # Rebuild roles
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-code-experimental-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-code-patch-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-code-daily-*", role="dev", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-store-*", role="deploy", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-deploy-*", role="deploy", defaultDeny=True
            ),
            buildbot.plugins.util.RebuildBuildEndpointMatcher(
                builder="*-doc-*", role="dev", defaultDeny=True
            ),
            # This also affects starting jobs via force scheduler
            buildbot.plugins.util.AnyControlEndpointMatcher(
                role="admin", defaultDeny=True
            ),
            # A default deny for any endpoint if not admin
            # If this is missing at the end, any UNMATCHED group will get 'allow'...
            buildbot.plugins.util.AnyControlEndpointMatcher(
                role="admin", defaultDeny=True
            ),
        ],
        roleMatchers=file_based_group_username_role_matchers,
    )

    return my_authz
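
For orientation, a minimal sketch of how these helpers would typically be wired into a Buildbot `master.cfg`; the `c = BuildmasterConfig` dict, the port, and the `ENVIRONMENT` value are assumptions for the example, not part of this change:

```python
# Hypothetical master.cfg excerpt (assumes master.cfg lives in config/ so the
# module is importable as conf.auth, and ENVIRONMENT is "LOCAL" or production).
import conf.auth

ENVIRONMENT = "LOCAL"

c = BuildmasterConfig = {}
c["www"] = {
    "port": 8010,
    "auth": conf.auth.fetch_authentication(ENVIRONMENT),
    "authz": conf.auth.fetch_authorization(ENVIRONMENT),
}
```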

config/conf/branches.py (new file, +106)
@@ -0,0 +1,106 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import copy

from collections import OrderedDict

# Blender repository branches used for daily builds and API doc generation.
code_tracked_branch_ids = {
    "vdev": "main",
    "vexp": "",
    "v360": "blender-v3.6-release",
    "v420": "blender-v4.2-release",
    "v430": "blender-v4.3-release",
}

# Processor architectures to build for each track.
code_official_platform_architectures = {
    "vdev": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"],
    "vexp": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"],
    "v360": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"],
    "v420": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"],
    "v430": ["darwin-x86_64", "darwin-arm64", "linux-x86_64", "windows-amd64"],
}

# Windows ARM64 not used by default yet.
code_all_platform_architectures = copy.deepcopy(code_official_platform_architectures)
code_all_platform_architectures["vdev"].append("windows-arm64")
code_all_platform_architectures["vexp"].append("windows-arm64")
code_all_platform_architectures["v430"].append("windows-arm64")

track_major_minor_versions = {
    "vdev": "4.4",
    "vexp": "4.4",
    "v360": "3.6",
    "v330": "3.3",
    "v420": "4.2",
    "v430": "4.3",
}

# Blender code and manual git branches.
track_code_branches = {
    "vdev": "main",
    "vexp": "main",
    "v360": "blender-v3.6-release",
    "v420": "blender-v4.2-release",
    "v430": "blender-v4.3-release",
}

# Tracks that correspond to an LTS version released on the Windows Store.
# Only add entries here AFTER the regular release is out, since it will
# otherwise generate the wrong package for the regular release.
windows_store_lts_tracks = ["v360", "v420"]

# Tracks that correspond to active and upcoming LTS releases. Used for
# the Snap track name, and for Steam to determine if there is a daily LTS
# track to upload to.
all_lts_tracks = ["v360", "v420"]

# Tracks for automated delivery of daily builds to stores.
code_store_track_ids = [
    "vdev",
    "v360",
    "v420",
    "v430",
]

# Tracks to deploy releases (regular and LTS) to download.blender.org.
code_deploy_track_ids = {
    "v360": None,
    "v420": None,
    "v430": None,
}

# Stable track for manual and API docs.
# Update on release.
doc_stable_major_minor_version = "4.3"

# Versions and labels for the user manual version switching menu.
# Update when creating new release branch, and on release.
doc_manual_version_labels = OrderedDict(
    [
        ("2.79", "2.79"),
        ("2.80", "2.80"),
        ("2.81", "2.81"),
        ("2.82", "2.82"),
        ("2.83", "2.83 (LTS)"),
        ("2.90", "2.90"),
        ("2.91", "2.91"),
        ("2.92", "2.92"),
        ("2.93", "2.93 (LTS)"),
        ("3.0", "3.0"),
        ("3.1", "3.1"),
        ("3.2", "3.2"),
        ("3.3", "3.3 (LTS)"),
        ("3.4", "3.4"),
        ("3.5", "3.5"),
        ("3.6", "3.6 (LTS)"),
        ("4.0", "4.0"),
        ("4.1", "4.1"),
        ("4.2", "4.2 (LTS)"),
        ("4.3", "4.3"),
        ("4.4", "4.4 (develop)"),
    ]
)

config/conf/local/__init__.py (new file, empty)

config/conf/local/auth.py (new file, +28)
@@ -0,0 +1,28 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import buildbot.plugins

# Buildbot admin with access to everything.
admin_usernames = [
    "admin",
]

# Release engineers with access to store and deploy builders.
deploy_dev_usernames = [
    "admin",
]

# Trusted developers with access to trigger daily, doc and patch builds.
trusted_dev_usernames = [
    "admin",
]


def get_authentication(ENVIRONMENT: str):
    class LocalEnvAuth(buildbot.plugins.util.CustomAuth):
        def check_credentials(self, user, password):
            return user.decode() == "admin" and password.decode() == "admin"

    return LocalEnvAuth()
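
As a quick sanity check of the local credentials (Buildbot's `CustomAuth.check_credentials` receives `bytes`, hence the `.decode()` calls above), something like this should hold; the import path assumes the same layout as in `conf/auth.py`:

```python
# Assumes this module is importable as conf.local.auth.
import conf.local.auth

auth = conf.local.auth.get_authentication("LOCAL")
assert auth.check_credentials(b"admin", b"admin")      # accepted
assert not auth.check_credentials(b"admin", b"wrong")  # rejected
```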

config/conf/local/machines.py (new file, +31)
@@ -0,0 +1,31 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

_worker_names = {
    "code-lint": ["localhost"],
    "linux-x86_64-code": ["localhost"],
    "linux-x86_64-code-gpu": ["localhost"],
    "linux-x86_64-doc-api": ["localhost"],
    "linux-x86_64-doc-studio-tools": ["localhost"],
    "linux-x86_64-general": ["localhost"],
    "linux-x86_64-store-snap": ["localhost"],
    "linux-x86_64-store-steam": ["localhost"],
    "darwin-arm64-code": ["localhost"],
    "darwin-arm64-code-gpu": ["localhost"],
    "darwin-x86_64-code": ["localhost"],
    "darwin-x86_64-code-gpu": ["localhost"],
    "windows-amd64-code": ["localhost"],
    "windows-amd64-code-gpu": [],
    "windows-amd64-store-windows": ["localhost"],
    "windows-arm64-code": ["localhost"],
    "windows-arm64-code-gpu": [],
}


def get_worker_password(worker_name: str) -> str:
    return "localhost"


def get_worker_names(ENVIRONMENT: str):
    return _worker_names
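
A minimal sketch of how this name map would typically become `Worker` objects in `master.cfg` (the `plugins_worker` import style mirrors the rest of this change; the dedup-and-sort step is an assumption, since several platform slots share the single "localhost" worker):

```python
# Hypothetical master.cfg excerpt: every local worker uses the password
# "localhost", as returned by get_worker_password above.
from buildbot.plugins import worker as plugins_worker

import conf.local.machines

worker_names = conf.local.machines.get_worker_names("LOCAL")
all_names = sorted({name for names in worker_names.values() for name in names})
workers = [
    plugins_worker.Worker(name, conf.local.machines.get_worker_password(name))
    for name in all_names
]
```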

config/conf/local/worker.py (new file, +87)
@@ -0,0 +1,87 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import os
import pathlib

from typing import Optional, Tuple

# Where tracks data is stored.
tracks_root_path = pathlib.Path.home() / "git"

# Software cache
software_cache_path = tracks_root_path / "downloads" / "software" / "workers"

# Docs delivery.
docs_user = os.getlogin()
docs_machine = "127.0.0.1"
docs_folder = tracks_root_path / "delivery" / "docs"
docs_port = 22

# Studio docs delivery.
studio_user = os.getlogin()
studio_machine = "127.0.0.1"
studio_folder = tracks_root_path / "delivery" / "studio" / "blender-studio-tools"
studio_port = 22

# Download delivery.
download_user = os.getlogin()
download_machine = "127.0.0.1"
download_source_folder = tracks_root_path / "delivery" / "download" / "source"
download_release_folder = tracks_root_path / "delivery" / "download" / "release"
download_port = 22

# Buildbot download delivery
buildbot_download_folder = tracks_root_path / "delivery" / "buildbot"

# Code signing
sign_code_windows_certificate = None  # "Blender Self Code Sign SPC"
sign_code_windows_time_servers = ["http://ts.ssl.com"]
sign_code_windows_server_url = "http://fake-windows-sign-server"

sign_code_darwin_certificate = None
sign_code_darwin_team_id = None
sign_code_darwin_apple_id = None
sign_code_darwin_keychain_profile = None


def darwin_keychain_password(service_env_id: str) -> str:
    return "fake_keychain_password"


# Steam
steam_app_id = None
steam_platform_depot_ids = {
    "windows": None,
    "linux": None,
    "darwin": None,
}


def steam_credentials(service_env_id: str) -> Tuple[str, str]:
    return "fake_steam_username", "fake_steam_password"


# Snap
def snap_credentials(service_env_id: str) -> str:
    return "fake_snap_credentials"


# Windows Store
windows_store_self_sign = False


def windows_store_certificate(service_env_id: str) -> str:
    # return sign_code_windows_certificate
    return "fake_windows_store_publisher"


# PyPI
def pypi_token(service_env_id: str) -> str:
    return "fake_pypi_token"


# Gitea
def gitea_api_token(service_env_id: str) -> Optional[str]:
    return None

config/conf/machines.py (new file, +39)
@@ -0,0 +1,39 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import importlib


def _get_config(ENVIRONMENT: str):
    if ENVIRONMENT == "LOCAL":
        import conf.local.machines

        importlib.reload(conf.local.machines)
        return conf.local.machines
    else:
        import conf.production.machines

        importlib.reload(conf.production.machines)
        return conf.production.machines


def fetch_platform_worker_names(ENVIRONMENT: str):
    machines_config = _get_config(ENVIRONMENT)
    return machines_config.get_worker_names(ENVIRONMENT)


def get_worker_password(ENVIRONMENT: str, worker_name: str) -> str:
    machines_config = _get_config(ENVIRONMENT)
    return machines_config.get_worker_password(worker_name)


def fetch_local_worker_names():
    worker_names = []
    worker_numbers = range(1, 5, 1)
    for worker_number in worker_numbers:
        worker_id = str(worker_number).zfill(2)
        worker_name = f"local-coordinator-{worker_id}"
        worker_names += [worker_name]

    return worker_names
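
`fetch_local_worker_names` is deterministic (`range(1, 5, 1)` yields 1 through 4, zero-padded to two digits), so the list it returns can be shown directly:

```python
>>> fetch_local_worker_names()
['local-coordinator-01', 'local-coordinator-02',
 'local-coordinator-03', 'local-coordinator-04']
```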

config/conf/worker.py (new file, +37)
@@ -0,0 +1,37 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import importlib

from typing import Any


def get_config(ENVIRONMENT: str) -> Any:
    if ENVIRONMENT == "LOCAL":
        import conf.local.worker

        importlib.reload(conf.local.worker)
        return conf.local.worker
    else:
        import conf.production.worker

        importlib.reload(conf.production.worker)
        return conf.production.worker


# Maybe useful in the future.
#
# import pathlib
# import importlib.util
#
# def _load_module_config(path: pathlib.Path) -> Any:
#     filepath = pathlib.Path(__file__).parent / path
#     spec = importlib.util.spec_from_file_location("config_module", filepath)
#     if not spec:
#         raise BaseException("Failed to load config module spec")
#     config_module = importlib.util.module_from_spec(spec)
#     if not spec.loader:
#         raise BaseException("Failed to load config module spec loader")
#     spec.loader.exec_module(config_module)
#     return config_module

config/gitea/LICENSE (new file, +21)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 LAB132

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

config/gitea/README.md (new file, +4)
@@ -0,0 +1,4 @@
### Buildbot Gitea Integration

Based on:
https://github.com/lab132/buildbot-gitea

config/gitea/__init__.py (new file, empty)

config/gitea/blender.py (new file, +64)
@@ -0,0 +1,64 @@
# SPDX-License-Identifier: MIT
# SPDX-FileCopyrightText: 2018 LAB132
# SPDX-FileCopyrightText: 2013-2024 Blender Authors
# <pep8 compliant>

# Based on the gitlab reporter from buildbot

from twisted.python import log

import buildbot.plugins

import importlib
import requests

import gitea.reporter

importlib.reload(gitea.reporter)

# Create status reporter service.
gitea_url = "https://projects.blender.org"
gitea_api_token = None
gitea_status_service = None


def setup_service(ENVIRONMENT: str):
    import conf.worker

    importlib.reload(conf.worker)
    worker_config = conf.worker.get_config(ENVIRONMENT)
    gitea_api_token = worker_config.gitea_api_token(ENVIRONMENT)

    if gitea_api_token:
        log.msg("Found Gitea API token, enabling status push")
        return gitea.reporter.GiteaStatusService11(
            gitea_url, gitea_api_token, verbose=False
        )
    else:
        log.msg("No Gitea API token found, status push disabled")
        return None


# Get revision for coordinator.
@buildbot.plugins.util.renderer
def get_patch_revision(props):
    if "revision" in props and props["revision"]:
        return {}
    if "pull_revision" in props and props["pull_revision"]:
        return {"revision": props["pull_revision"]}
    pull_id = props["patch_id"]
    url = f"{gitea_url}/api/v1/repos/blender/blender/pulls/{pull_id}"
    response = requests.get(url, headers={"accept": "application/json"})
    sha = response.json().get("head", {"sha": ""}).get("sha")
    return {"revision": sha}


@buildbot.plugins.util.renderer
def get_branch_revision(props):
    if "revision" in props and props["revision"]:
        return {}
    branch = props["override_branch_id"]
    url = f"{gitea_url}/api/v1/repos/blender/blender/git/commits/{branch}"
    response = requests.get(url, headers={"accept": "application/json"})
    sha = response.json().get("sha", "")
    return {"revision": sha}
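
A renderer that returns a dict of properties is typically consumed with a `SetProperties` step; a minimal sketch of that wiring (the step name and placement are assumptions, not part of this change):

```python
# Hypothetical usage: resolve a pull request's head revision before checkout,
# using the get_patch_revision renderer defined above.
from buildbot.plugins import steps as plugins_steps

import gitea.blender

resolve_revision_step = plugins_steps.SetProperties(
    name="get-revision",
    properties=gitea.blender.get_patch_revision,
)
```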

config/gitea/reporter.py (new file, +294)
@@ -0,0 +1,294 @@
# SPDX-License-Identifier: MIT
# SPDX-FileCopyrightText: 2018 LAB132
# SPDX-FileCopyrightText: 2013-2024 Blender Authors
# <pep8 compliant>

# Based on the gitlab reporter from buildbot

from __future__ import absolute_import
from __future__ import print_function

from twisted.internet import defer
from twisted.python import log

from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters import http
from buildbot.util import httpclientservice
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.message import MessageFormatterRenderable


import re

# This name has a number in it to trick Buildbot into reloading the module
# without a restart. It needs to be incremented every time this file is
# changed. Is there a better solution?
class GiteaStatusService11(http.ReporterBase):
    name = "GiteaStatusService11"
    ssh_url_match = re.compile(
        r"(ssh://)?[\w+\-\_]+@[\w\.\-\_]+:?(\d*/)?(?P<owner>[\w_\-\.]+)/(?P<repo_name>[\w_\-\.]+?)(\.git)?$"
    )

    def checkConfig(
        self,
        baseURL,
        token,
        context=None,
        context_pr=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        warningAsSuccess=False,
        **kwargs,
    ):
        if generators is None:
            generators = self._create_default_generators()

        super().checkConfig(generators=generators, **kwargs)
        httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        baseURL,
        token,
        context=None,
        context_pr=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        warningAsSuccess=False,
        **kwargs,
    ):
        token = yield self.renderSecrets(token)
        self.debug = debug
        self.verify = verify
        self.verbose = verbose
        if generators is None:
            generators = self._create_default_generators()

        yield super().reconfigService(generators=generators, **kwargs)

        self.context = context or Interpolate("buildbot/%(prop:buildername)s")
        self.context_pr = context_pr or Interpolate(
            "buildbot/pull_request/%(prop:buildername)s"
        )
        if baseURL.endswith("/"):
            baseURL = baseURL[:-1]
        self.baseURL = baseURL
        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master,
            baseURL,
            headers={"Authorization": "token {}".format(token)},
            debug=self.debug,
            verify=self.verify,
        )
        self.verbose = verbose
        self.project_ids = {}
        self.warningAsSuccess = warningAsSuccess

    def _create_default_generators(self):
        start_formatter = MessageFormatterRenderable("Build started.")
        end_formatter = MessageFormatterRenderable("Build done.")

        return [
            BuildStartEndStatusGenerator(
                start_formatter=start_formatter, end_formatter=end_formatter
            )
        ]

    def createStatus(
        self,
        project_owner,
        repo_name,
        sha,
        state,
        target_url=None,
        description=None,
        context=None,
    ):
        """
        :param project_owner: username of the owning user or organization
        :param repo_name: name of the repository
        :param sha: Full sha to create the status for.
        :param state: one of the following: 'pending', 'success', 'failed'
                      or 'cancelled'.
        :param target_url: Target url to associate with this status.
        :param description: Short description of the status.
        :param context: Context of the result.
        :return: A deferred with the result from Gitea.
        """
        payload = {"state": state}

        if description is not None:
            payload["description"] = description

        if target_url is not None:
            payload["target_url"] = target_url

        if context is not None:
            payload["context"] = context

        url = "/api/v1/repos/{owner}/{repository}/statuses/{sha}".format(
            owner=project_owner, repository=repo_name, sha=sha
        )
        log.msg(f"Sending status to {url}: {payload}")

        return self._http.post(url, json=payload)

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        yield self._send_impl(reports)

    @defer.inlineCallbacks
    def _send_status(
        self, build, repository_owner, repository_name, sha, state, context, description
    ):
        try:
            target_url = build["url"]
            res = yield self.createStatus(
                project_owner=repository_owner,
                repo_name=repository_name,
                sha=sha,
                state=state,
                target_url=target_url,
                context=context,
                description=description,
            )
            if res.code not in (200, 201, 204):
                message = yield res.json()
                message = message.get("message", "unspecified error")
                log.msg(
                    'Could not send status "{state}" for '
                    "{repo} at {sha}: {code} : {message}".format(
                        state=state,
                        repo=repository_name,
                        sha=sha,
                        code=res.code,
                        message=message,
                    )
                )
            elif self.verbose:
                log.msg(
                    'Status "{state}" sent for ' "{repo} at {sha}.".format(
                        state=state, repo=repository_name, sha=sha
                    )
                )
        except Exception as e:
            log.err(
                e,
                'Failed to send status "{state}" for ' "{repo} at {sha}".format(
                    state=state, repo=repository_name, sha=sha
                ),
            )

    @defer.inlineCallbacks
    def _send_impl(self, reports):
        for report in reports:
            try:
                builds = report["builds"]
            except KeyError:
                continue

            for build in builds:
                builder_name = build["builder"]["name"]

                props = Properties.fromDict(build["properties"])
                props.master = self.master

                description = report.get("body", None)

                if build["complete"]:
                    state = {
                        SUCCESS: "success",
                        WARNINGS: "success" if self.warningAsSuccess else "warning",
                        FAILURE: "failure",
                        SKIPPED: "success",
                        EXCEPTION: "error",
                        RETRY: "pending",
                        CANCELLED: "error",
                    }.get(build["results"], "failure")
                else:
                    state = "pending"

                if "pr_id" in props:
                    context = yield props.render(self.context_pr)
                else:
                    context = yield props.render(self.context)

                sourcestamps = build["buildset"]["sourcestamps"]

                # BLENDER: some hardcoded logic for now.
                if (
                    "-code-daily-" in builder_name
                    or "-code-patch-" in builder_name
                    or "-code-experimental-" in builder_name
                ):
                    repository_owner = "blender"
                    repository_name = "blender"
                elif "-doc-manual-" in builder_name:
                    repository_owner = "blender"
                    repository_name = "blender-manual"
                elif "-doc-developer" in builder_name:
                    repository_owner = "blender"
                    repository_name = "blender-developer-docs"
                else:
                    continue

                # Source change from Git poller.
                for sourcestamp in sourcestamps:
                    sha = sourcestamp["revision"]
                    if sha not in {None, "", "HEAD"}:
                        self._send_status(
                            build,
                            repository_owner,
                            repository_name,
                            sha,
                            state,
                            context,
                            description,
                        )
                        continue

                # Revision specified by get-revision step.
                if "revision" in props:
                    sha = props["revision"]
                    if sha not in {None, "", "HEAD"}:
                        self._send_status(
                            build,
                            repository_owner,
                            repository_name,
                            sha,
                            state,
                            context,
                            description,
                        )

                # Revision from blender-bot, so we can send a status before
                # the get-revision step runs.
                if "pull_revision" in props:
                    sha = props["pull_revision"]
                    if sha not in {None, "", "HEAD"}:
                        self._send_status(
                            build,
                            repository_owner,
                            repository_name,
                            sha,
                            state,
                            context,
                            description,
                        )

                continue
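
A minimal sketch of registering this reporter in `master.cfg` via the `setup_service` helper from `config/gitea/blender.py`; the `c` dict and `ENVIRONMENT` value are assumptions for the example:

```python
# Hypothetical master.cfg excerpt: push build statuses back to Gitea when a
# token is configured; setup_service returns None otherwise.
import gitea.blender

gitea_status_service = gitea.blender.setup_service(ENVIRONMENT)
if gitea_status_service is not None:
    c["services"] = c.get("services", []) + [gitea_status_service]
```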

config/pipeline/__init__.py (new file, +101)
@@ -0,0 +1,101 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

import importlib

from buildbot.plugins import changes as plugins_changes

import conf.branches

import pipeline.common
import pipeline.code
import pipeline.code_benchmark
import pipeline.code_deploy
import pipeline.code_bpy_deploy
import pipeline.code_store
import pipeline.doc_api
import pipeline.doc_manual
import pipeline.doc_developer
import pipeline.doc_studio

importlib.reload(pipeline.common)
importlib.reload(conf.branches)


def populate(ENVIRONMENT):
    pipelines_modules = [
        pipeline.code,
        pipeline.code_benchmark,
        pipeline.code_deploy,
        pipeline.code_bpy_deploy,
        pipeline.code_store,
        pipeline.doc_api,
        pipeline.doc_manual,
        pipeline.doc_developer,
        pipeline.doc_studio,
    ]

    builders = []
    schedulers = []

    for pipelines_module in pipelines_modules:
        importlib.reload(pipelines_module)
        b, s = pipelines_module.populate(ENVIRONMENT)
        builders += b
        schedulers += s

    return builders, schedulers


def change_sources():
    branch_ids = list(conf.branches.code_tracked_branch_ids.values())

    pollers = []
    poll_interval_in_seconds = 2 * 60

    pollers += [
        plugins_changes.GitPoller(
            repourl="https://projects.blender.org/blender/blender.git",
            pollAtLaunch=True,
            pollInterval=poll_interval_in_seconds,
            workdir="blender-gitpoller-workdir",
            project="blender.git",
            branches=branch_ids,
        )
    ]

    pollers += [
        plugins_changes.GitPoller(
            repourl="https://projects.blender.org/blender/blender-manual.git",
            pollAtLaunch=True,
            pollInterval=poll_interval_in_seconds,
            workdir="blender-manual-gitpoller-workdir",
            project="blender-manual.git",
            branches=branch_ids,
        )
    ]

    pollers += [
        plugins_changes.GitPoller(
            repourl="https://projects.blender.org/blender/blender-developer-docs.git",
            pollAtLaunch=True,
            pollInterval=poll_interval_in_seconds,
            workdir="blender-developer-docs-gitpoller-workdir",
            project="blender-developer-docs.git",
            branches=["main"],
        )
    ]

    pollers += [
        plugins_changes.GitPoller(
            repourl="https://projects.blender.org/studio/blender-studio-tools.git",
            pollAtLaunch=True,
            pollInterval=poll_interval_in_seconds,
            workdir="blender-studio-tools-gitpoller-workdir",
            project="blender-studio-tools.git",
            branches=["main"],
        )
    ]

    return pollers
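
And a minimal sketch of how `master.cfg` would consume this module (again, the `c` dict and `ENVIRONMENT` value are assumptions, not part of this change):

```python
# Hypothetical master.cfg excerpt: hand the builders, schedulers and pollers
# assembled by this module to the master configuration.
import pipeline

builders, schedulers = pipeline.populate(ENVIRONMENT)
c["builders"] = builders
c["schedulers"] = schedulers
c["change_source"] = pipeline.change_sources()
```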

config/pipeline/code.py (new file, +808)
@@ -0,0 +1,808 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

from functools import partial
import pathlib
import random

import buildbot.plugins

from buildbot.plugins import steps as plugins_steps
from buildbot.plugins import schedulers as plugins_schedulers

import conf.branches
import conf.worker

import pipeline.common
import gitea.reporter

# Timeouts.
default_step_timeout_in_seconds = 10 * 60
# TODO: The compile step needs a longer timeout because of linking on Windows.
compile_code_step_timeout_in_seconds = 10 * 60
compile_gpu_step_timeout_in_seconds = 1.5 * 60 * 60

tree_stable_timer_in_seconds = 15 * 60

package_step_timeout_in_seconds = 20 * 60

# Build steps.
code_pipeline_general_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]

code_pipeline_daily_step_names = code_pipeline_general_step_names

code_pipeline_patch_step_names = [
    "configure-machine",
    "update-code",
    "compile-code",
    "compile-gpu",
    "compile-install",
    "test-code",
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
    "deliver-test-results",
    "clean",
]

code_pipeline_experimental_step_names = code_pipeline_general_step_names

pipeline_types_step_names = {
    "daily": code_pipeline_daily_step_names,
    "patch": code_pipeline_patch_step_names,
    "experimental": code_pipeline_experimental_step_names,
}

code_pipeline_lint_step_names = [
    "configure-machine",
    "update-code",
    "lint-code",
]

# Steps for testing.
code_pipeline_test_step_names = [
    "test-code",
]

# Steps for package delivery.
code_delivery_step_names = [
    "sign-code-binaries",
    "package-code-binaries",
    "deliver-code-binaries",
]

# Steps skipped for Python module.
code_python_module_skip_test_names = ["sign-code-binaries"]


# Tracks.
code_tracked_branch_ids = conf.branches.code_tracked_branch_ids
code_track_ids = list(code_tracked_branch_ids.keys())
code_all_platform_architectures = conf.branches.code_all_platform_architectures
code_official_platform_architectures = (
    conf.branches.code_official_platform_architectures
)

code_track_pipeline_types = {}
track_properties = {}
for track, branch in code_tracked_branch_ids.items():
    if track == "vdev":
        code_track_pipeline_types[track] = ["daily"]
    elif track == "vexp":
        code_track_pipeline_types[track] = ["experimental", "patch"]
    else:
        code_track_pipeline_types[track] = ["daily"]

    # Track properties.
    track_properties[track] = [
        buildbot.plugins.util.ChoiceStringParameter(
            name="platform_architectures",
            label="Platforms:",
            required=True,
            choices=code_all_platform_architectures[track],
            multiple=True,
            strict=True,
            default=code_official_platform_architectures[track],
        ),
    ]

# Scheduler properties.
scheduler_properties_common = [
    buildbot.plugins.util.BooleanParameter(
        name="python_module",
        label="Python module -> build bpy module instead of Blender",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_full_clean",
        label="Full clean -> removes build workspace on machine",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_package_delivery",
        label="Package delivery -> push files to configured services",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_binaries",
        label="GPU binaries -> build Cycles GPU kernels",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_gpu_tests",
        label="GPU tests -> run EEVEE, Viewport and Cycles GPU tests",
        required=True,
        strict=True,
        default=False,
    ),
]

# code-daily
scheduler_properties_daily = scheduler_properties_common

# code-experimental properties.
scheduler_properties_experimental = [
    buildbot.plugins.util.StringParameter(
        name="override_branch_id",
        label="Branch:",
        required=True,
        size=80,
        regex=r"^[a-zA-Z0-9][A-Za-z0-9\._-]*$",
        default="",
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
]
scheduler_properties_experimental += scheduler_properties_common


# code-patch properties.
scheduler_properties_patch = [
    buildbot.plugins.util.StringParameter(
        name="patch_id", label="Patch Id:", required=True, size=80, default=""
    ),
    buildbot.plugins.util.ChoiceStringParameter(
        name="build_configuration",
        label="Configuration:",
        required=True,
        choices=["release", "sanitizer", "debug"],
        multiple=False,
        strict=True,
        default="release",
    ),
    buildbot.plugins.util.BooleanParameter(
        name="needs_skip_tests",
        label="Skip tests -> bypass running all tests",
        required=True,
        strict=True,
        default=False,
    ),
    buildbot.plugins.util.StringParameter(
        name="pull_revision",
        label="Pull Revision:",
        required=False,
        hide=True,
        size=80,
        default="",
    ),
]

scheduler_properties_patch += scheduler_properties_common

scheduler_properties = {
    "code-daily": scheduler_properties_daily,
    "code-experimental": scheduler_properties_experimental,
    "code-patch": scheduler_properties_patch,
}


@buildbot.plugins.util.renderer
def create_code_worker_command_args(
    props, ENVIRONMENT, track_id, pipeline_type, step_name
):
    commit_id = pipeline.common.fetch_property(props, key="revision", default="HEAD")
    patch_id = pipeline.common.fetch_property(props, key="patch_id", default="")
    override_branch_id = pipeline.common.fetch_property(
        props, key="override_branch_id", default=""
    )
    python_module = pipeline.common.fetch_property(
        props, key="python_module", default=False
    )
    needs_gpu_tests = pipeline.common.fetch_property(
        props, key="needs_gpu_tests", default=False
    )
    needs_gpu_binaries = pipeline.common.fetch_property(
        props, key="needs_gpu_binaries", default=False
    )
    build_configuration = pipeline.common.fetch_property(
        props, key="build_configuration", default="release"
    )
    needs_full_clean = pipeline.common.fetch_property(
        props, key="needs_full_clean", default="false"
    )
    needs_full_clean = needs_full_clean in ["true", True]
    needs_package_delivery = pipeline.common.fetch_property(
        props, key="needs_package_delivery", default="false"
    )
    needs_package_delivery = needs_package_delivery in ["true", True]

    # Auto enable asserts when not using package delivery. Only supported in 4.1+.
    if track_id not in ("v360",):
        if build_configuration == "release" and not needs_package_delivery:
            build_configuration = "asserts"
| 
 | ||||
|     platform_id, architecture = pipeline.common.fetch_platform_architecture(props) | ||||
| 
 | ||||
|     args = [] | ||||
| 
 | ||||
|     if architecture: | ||||
|         args += ["--architecture", architecture] | ||||
| 
 | ||||
|     if pipeline_type == "patch": | ||||
|         # PowerShell doesn't like '#' in string arguments, so strip it (e.g. "#12345" -> "12345"). | ||||
|         args += ["--patch-id", patch_id.lstrip("#")] | ||||
|     elif pipeline_type == "experimental": | ||||
|         args += ["--branch-id", override_branch_id] | ||||
| 
 | ||||
|     args += ["--commit-id", commit_id] | ||||
|     args += ["--build-configuration", build_configuration] | ||||
| 
 | ||||
|     if python_module: | ||||
|         args += ["--python-module"] | ||||
|     if needs_full_clean: | ||||
|         args += ["--needs-full-clean"] | ||||
|     if step_name in ["compile-gpu", "compile-install", "test-code"]: | ||||
|         if needs_package_delivery or needs_gpu_binaries: | ||||
|             args += ["--needs-gpu-binaries"] | ||||
|         if needs_gpu_tests: | ||||
|             args += ["--needs-gpu-tests"] | ||||
| 
 | ||||
|     args += [step_name] | ||||
| 
 | ||||
|     return pipeline.common.create_worker_command("code.py", ENVIRONMENT, track_id, args) | ||||
| 
 | ||||
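| # Illustrative rendering (values are assumptions): for a patch build with | ||||
| # patch "#12345" on track "vdev", architecture "x86_64", the command becomes: | ||||
| #   python ../../../../../git/blender-devops/buildbot/worker/code.py | ||||
| #       --track-id vdev --service-env-id LOCAL --architecture x86_64 | ||||
| #       --patch-id 12345 --commit-id HEAD --build-configuration release | ||||
| #       compile-code | ||||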
| 
 | ||||
| def needs_do_code_pipeline_step(step): | ||||
|     # Use this to test master steps only; otherwise we'd be waiting for 30 minutes. | ||||
|     needs_master_steps_only = False | ||||
| 
 | ||||
|     if needs_master_steps_only: | ||||
|         is_master_step = step.name in pipeline.common.code_pipeline_master_step_names | ||||
|         return is_master_step | ||||
| 
 | ||||
|     is_package_delivery_step = (step.name in code_delivery_step_names) or ( | ||||
|         step.name in pipeline.common.code_pipeline_master_step_names | ||||
|     ) | ||||
|     needs_package_delivery = step.getProperty("needs_package_delivery") | ||||
|     needs_gpu_binaries = step.getProperty("needs_gpu_binaries") | ||||
|     needs_skip_tests = step.getProperty("needs_skip_tests") | ||||
| 
 | ||||
|     python_module = step.getProperty("python_module") | ||||
| 
 | ||||
|     needs_do_it = True | ||||
| 
 | ||||
|     if step.name in code_pipeline_test_step_names: | ||||
|         needs_do_it = not needs_skip_tests | ||||
|     elif step.name == "compile-gpu": | ||||
|         needs_do_it = needs_package_delivery or needs_gpu_binaries | ||||
|     elif is_package_delivery_step: | ||||
|         needs_do_it = needs_package_delivery | ||||
| 
 | ||||
|     if python_module and (step.name in code_python_module_skip_test_names): | ||||
|         needs_do_it = False | ||||
| 
 | ||||
|     return needs_do_it | ||||
| 
 | ||||
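| # In short: test steps run unless needs_skip_tests is set; "compile-gpu" runs | ||||
| # only when package delivery or GPU binaries are requested; delivery and | ||||
| # master steps require needs_package_delivery; python-module builds skip the | ||||
| # steps listed in code_python_module_skip_test_names. | ||||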
| 
 | ||||
| # Custom file upload that shows links to download files. | ||||
| class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): | ||||
|     def uploadDone(self, result, source, masterdest): | ||||
|         if not self.url: | ||||
|             return | ||||
| 
 | ||||
|         name = pathlib.Path(source).name | ||||
|         if name.endswith(".zip"): | ||||
|             self.addURL(name, self.url + "/" + name) | ||||
|         else: | ||||
|             self.addURL(name, self.url + "/" + name + "/report.html") | ||||
| 
 | ||||
|     def allUploadsDone(self, result, sources, masterdest): | ||||
|         return | ||||
| 
 | ||||
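| # Example links this step would add (file names are illustrative): an upload | ||||
| # "tests-main-x86_64.zip" is linked as <url>/tests-main-x86_64.zip, while a | ||||
| # report directory "v42-cycles" is linked as <url>/v42-cycles/report.html. | ||||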
| 
 | ||||
| def create_deliver_code_binaries_step(worker_config, track_id, pipeline_type): | ||||
|     max_file_size = 500 * 1024 * 1024  # 500 MB | ||||
|     worker_source_path = pathlib.Path( | ||||
|         f"../../../../git/blender-{track_id}/build_package" | ||||
|     ) | ||||
|     master_dest_path = pathlib.Path( | ||||
|         f"{worker_config.buildbot_download_folder}/{pipeline_type}" | ||||
|     ).expanduser() | ||||
| 
 | ||||
|     return plugins_steps.MultipleFileUpload( | ||||
|         name="deliver-code-binaries", | ||||
|         maxsize=max_file_size, | ||||
|         workdir=f"{worker_source_path}", | ||||
|         glob=True, | ||||
|         workersrcs=["*.*"], | ||||
|         masterdest=f"{master_dest_path}", | ||||
|         mode=0o644, | ||||
|         url=None, | ||||
|         description="running", | ||||
|         descriptionDone="completed", | ||||
|         doStepIf=needs_do_code_pipeline_step, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def create_deliver_test_results_step(worker_config, track_id, pipeline_type): | ||||
|     max_file_size = 500 * 1024 * 1024  # 500 MB | ||||
|     worker_source_path = pathlib.Path( | ||||
|         f"../../../../git/blender-{track_id}/build_package" | ||||
|     ) | ||||
|     master_dest_path = pathlib.Path( | ||||
|         f"{worker_config.buildbot_download_folder}/{pipeline_type}" | ||||
|     ).expanduser() | ||||
| 
 | ||||
|     tests_worker_source_path = worker_source_path / "tests" | ||||
|     tests_master_dest_path = master_dest_path / "tests" | ||||
|     tests_worker_srcs = ["tests-*.zip"] | ||||
| 
 | ||||
|     branch_id = code_tracked_branch_ids[track_id] | ||||
|     if branch_id: | ||||
|         branch_id = branch_id.replace("blender-", "").replace("-release", "") | ||||
|         tests_worker_srcs.append(branch_id + "-*") | ||||
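|         # Example (branch name is an assumption): "blender-v4.2-release" | ||||
|         # becomes "v4.2", so report directories matching "v4.2-*" are | ||||
|         # delivered alongside the tests-*.zip archives. | ||||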
| 
 | ||||
|     return LinkMultipleFileUpload( | ||||
|         name="deliver-test-results", | ||||
|         maxsize=max_file_size, | ||||
|         workdir=f"{tests_worker_source_path}", | ||||
|         glob=True, | ||||
|         workersrcs=tests_worker_srcs, | ||||
|         masterdest=f"{tests_master_dest_path}", | ||||
|         mode=0o644, | ||||
|         url=f"../download/{pipeline_type}/tests", | ||||
|         description="running", | ||||
|         descriptionDone="completed", | ||||
|         alwaysRun=True, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def next_worker_code(worker_names_gpu, builder, workers, request): | ||||
|     # Use a GPU worker if needed and supported for this platform. | ||||
|     # NVIDIA worker is currently reserved for GPU builds only. | ||||
|     compatible_workers = [] | ||||
|     if request.properties.getProperty("needs_gpu_tests", False) and worker_names_gpu: | ||||
|         for worker in workers: | ||||
|             if worker.worker.workername in worker_names_gpu: | ||||
|                 compatible_workers.append(worker) | ||||
|     else: | ||||
|         for worker in workers: | ||||
|             if "nvidia" not in worker.worker.workername: | ||||
|                 compatible_workers.append(worker) | ||||
| 
 | ||||
|     if not compatible_workers: | ||||
|         return None | ||||
| 
 | ||||
|     return random.choice(compatible_workers) | ||||
| 
 | ||||
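| # next_worker_code is bound with the GPU worker list up front so the result | ||||
| # matches buildbot's nextWorker callback signature, as in the BuilderConfig | ||||
| # wiring below: | ||||
| #   nextWorker=partial(next_worker_code, pipeline_worker_names_gpu) | ||||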
| 
 | ||||
| class PlatformTrigger(plugins_steps.Trigger): | ||||
|     def getSchedulersAndProperties(self): | ||||
|         schedulers = [] | ||||
| 
 | ||||
|         platform_architectures = self.set_properties["platform_architectures"] | ||||
| 
 | ||||
|         for scheduler in self.schedulerNames: | ||||
|             found = False | ||||
|             if "lint" in scheduler: | ||||
|                 found = True | ||||
|             for platform_architecture in platform_architectures: | ||||
|                 if platform_architecture in scheduler: | ||||
|                     found = True | ||||
| 
 | ||||
|             if found: | ||||
|                 schedulers.append( | ||||
|                     { | ||||
|                         "sched_name": scheduler, | ||||
|                         "props_to_set": self.set_properties, | ||||
|                         "unimportant": False, | ||||
|                     } | ||||
|                 ) | ||||
| 
 | ||||
|         return schedulers | ||||
| 
 | ||||
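| # Illustration (names are assumptions): with platform_architectures set to | ||||
| # ["linux-x86_64"], a scheduler "v42-code-patch-linux-x86_64-triggerable" is | ||||
| # triggered, as is any lint scheduler; schedulers for other platforms are | ||||
| # skipped. | ||||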
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     builders = [] | ||||
|     schedulers = [] | ||||
| 
 | ||||
|     platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) | ||||
|     local_worker_names = conf.machines.fetch_local_worker_names() | ||||
| 
 | ||||
|     worker_config = conf.worker.get_config(ENVIRONMENT) | ||||
| 
 | ||||
|     needs_incremental_schedulers = ENVIRONMENT in ["PROD"] | ||||
|     needs_nightly_schedulers = ENVIRONMENT in ["PROD"] | ||||
| 
 | ||||
|     print("*** Creating [code] pipeline") | ||||
|     for track_id in code_track_ids: | ||||
|         pipeline_types = code_track_pipeline_types[track_id] | ||||
|         for pipeline_type in pipeline_types: | ||||
|             # Create steps. | ||||
|             step_names = pipeline_types_step_names[pipeline_type] | ||||
|             pipeline_build_factory = buildbot.plugins.util.BuildFactory() | ||||
| 
 | ||||
|             print(f"Creating [{track_id}] [code] [{pipeline_type}] pipeline steps") | ||||
|             for step_name in step_names: | ||||
|                 if step_name == "deliver-code-binaries": | ||||
|                     step = create_deliver_code_binaries_step( | ||||
|                         worker_config, track_id, pipeline_type | ||||
|                     ) | ||||
|                 elif step_name == "deliver-test-results": | ||||
|                     step = create_deliver_test_results_step( | ||||
|                         worker_config, track_id, pipeline_type | ||||
|                     ) | ||||
|                 else: | ||||
|                     needs_halt_on_failure = True | ||||
|                     if step_name in code_pipeline_test_step_names: | ||||
|                         needs_halt_on_failure = track_id != "vexp" | ||||
| 
 | ||||
|                     step_timeout_in_seconds = default_step_timeout_in_seconds | ||||
|                     if step_name == "compile-code": | ||||
|                         step_timeout_in_seconds = compile_code_step_timeout_in_seconds | ||||
|                     elif step_name == "compile-gpu": | ||||
|                         step_timeout_in_seconds = compile_gpu_step_timeout_in_seconds | ||||
| 
 | ||||
|                     step_command = create_code_worker_command_args.withArgs( | ||||
|                         ENVIRONMENT, track_id, pipeline_type, step_name | ||||
|                     ) | ||||
| 
 | ||||
|                     step = buildbot.plugins.steps.ShellCommand( | ||||
|                         name=step_name, | ||||
|                         logEnviron=True, | ||||
|                         haltOnFailure=needs_halt_on_failure, | ||||
|                         timeout=step_timeout_in_seconds, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                         doStepIf=needs_do_code_pipeline_step, | ||||
|                         command=step_command, | ||||
|                     ) | ||||
| 
 | ||||
|                 pipeline_build_factory.addStep(step) | ||||
| 
 | ||||
|             for master_step_name in pipeline.common.code_pipeline_master_step_names: | ||||
|                 master_step_command = ( | ||||
|                     pipeline.common.create_master_command_args.withArgs( | ||||
|                         ENVIRONMENT, | ||||
|                         track_id, | ||||
|                         pipeline_type, | ||||
|                         master_step_name, | ||||
|                         single_platform=True, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|                 # Master-side step to archive and purge builds. | ||||
|                 pipeline_build_factory.addStep( | ||||
|                     plugins_steps.MasterShellCommand( | ||||
|                         name=master_step_name, | ||||
|                         logEnviron=False, | ||||
|                         command=master_step_command, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                         doStepIf=needs_do_code_pipeline_step, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|             # Create lint pipeline | ||||
|             pipeline_lint_factory = buildbot.plugins.util.BuildFactory() | ||||
|             for step_name in code_pipeline_lint_step_names: | ||||
|                 step_command = create_code_worker_command_args.withArgs( | ||||
|                     ENVIRONMENT, track_id, pipeline_type, step_name | ||||
|                 ) | ||||
| 
 | ||||
|                 pipeline_lint_factory.addStep( | ||||
|                     buildbot.plugins.steps.ShellCommand( | ||||
|                         name=step_name, | ||||
|                         logEnviron=True, | ||||
|                         haltOnFailure=True, | ||||
|                         timeout=default_step_timeout_in_seconds, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                         command=step_command, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|             triggerable_scheduler_names = [] | ||||
|             trigger_factory = buildbot.plugins.util.BuildFactory() | ||||
| 
 | ||||
|             # Create builders. | ||||
|             for platform_architecture in code_all_platform_architectures[track_id]: | ||||
|                 print( | ||||
|                     f"Creating [{track_id}] [{pipeline_type}] [{platform_architecture}] builders" | ||||
|                 ) | ||||
| 
 | ||||
|                 worker_group_id = f"{platform_architecture}-code" | ||||
|                 worker_group_id_gpu = f"{platform_architecture}-code-gpu" | ||||
| 
 | ||||
|                 pipeline_worker_names = platform_worker_names[worker_group_id] | ||||
|                 pipeline_worker_names_gpu = platform_worker_names[worker_group_id_gpu] | ||||
|                 if pipeline_worker_names: | ||||
|                     # Only create the builders if the worker exists | ||||
|                     pipeline_builder_name = ( | ||||
|                         f"{track_id}-code-{pipeline_type}-{platform_architecture}" | ||||
|                     ) | ||||
|                     pipeline_builder_tags = pipeline_builder_name.split("-") | ||||
| 
 | ||||
|                     # Assign different workers to different tracks; on Linux | ||||
|                     # x86_64, non-local environments use Rocky-based workers only. | ||||
|                     suitable_pipeline_worker_names = pipeline_worker_names | ||||
|                     if ( | ||||
|                         platform_architecture == "linux-x86_64" | ||||
|                         and ENVIRONMENT != "LOCAL" | ||||
|                     ): | ||||
|                         selector = "rocky" | ||||
|                         suitable_pipeline_worker_names = [ | ||||
|                             worker | ||||
|                             for worker in pipeline_worker_names | ||||
|                             if selector in worker | ||||
|                         ] | ||||
| 
 | ||||
|                     builders += [ | ||||
|                         buildbot.plugins.util.BuilderConfig( | ||||
|                             name=pipeline_builder_name, | ||||
|                             workernames=suitable_pipeline_worker_names, | ||||
|                             nextWorker=partial( | ||||
|                                 next_worker_code, pipeline_worker_names_gpu | ||||
|                             ), | ||||
|                             tags=pipeline_builder_tags, | ||||
|                             factory=pipeline_build_factory, | ||||
|                         ) | ||||
|                     ] | ||||
| 
 | ||||
|                     pipeline_scheduler_name = f"{track_id}-code-{pipeline_type}-{platform_architecture}-triggerable" | ||||
|                     triggerable_scheduler_names += [pipeline_scheduler_name] | ||||
| 
 | ||||
|                     schedulers += [ | ||||
|                         plugins_schedulers.Triggerable( | ||||
|                             name=pipeline_scheduler_name, | ||||
|                             builderNames=[pipeline_builder_name], | ||||
|                         ) | ||||
|                     ] | ||||
| 
 | ||||
|             # Create lint builder | ||||
|             if track_id not in conf.branches.all_lts_tracks: | ||||
|                 print(f"Creating [{track_id}] [{pipeline_type}] [lint] builders") | ||||
| 
 | ||||
|                 pipeline_worker_names = platform_worker_names["code-lint"] | ||||
|                 if pipeline_worker_names: | ||||
|                     # Only create the builders if the worker exists | ||||
|                     pipeline_builder_name = f"{track_id}-code-{pipeline_type}-lint" | ||||
|                     pipeline_builder_tags = pipeline_builder_name.split("-") | ||||
| 
 | ||||
|                     builders += [ | ||||
|                         buildbot.plugins.util.BuilderConfig( | ||||
|                             name=pipeline_builder_name, | ||||
|                             workernames=pipeline_worker_names, | ||||
|                             tags=pipeline_builder_tags, | ||||
|                             factory=pipeline_lint_factory, | ||||
|                         ) | ||||
|                     ] | ||||
| 
 | ||||
|                     pipeline_scheduler_name = ( | ||||
|                         f"{track_id}-code-{pipeline_type}-lint-triggerable" | ||||
|                     ) | ||||
|                     triggerable_scheduler_names += [pipeline_scheduler_name] | ||||
| 
 | ||||
|                     schedulers += [ | ||||
|                         plugins_schedulers.Triggerable( | ||||
|                             name=pipeline_scheduler_name, | ||||
|                             builderNames=[pipeline_builder_name], | ||||
|                         ) | ||||
|                     ] | ||||
| 
 | ||||
|             # Create coordinator. | ||||
|             if triggerable_scheduler_names: | ||||
|                 trigger_properties = { | ||||
|                     "python_module": buildbot.plugins.util.Property("python_module"), | ||||
|                     "needs_full_clean": buildbot.plugins.util.Property( | ||||
|                         "needs_full_clean" | ||||
|                     ), | ||||
|                     "needs_package_delivery": buildbot.plugins.util.Property( | ||||
|                         "needs_package_delivery" | ||||
|                     ), | ||||
|                     "needs_gpu_binaries": buildbot.plugins.util.Property( | ||||
|                         "needs_gpu_binaries" | ||||
|                     ), | ||||
|                     "needs_gpu_tests": buildbot.plugins.util.Property( | ||||
|                         "needs_gpu_tests" | ||||
|                     ), | ||||
|                     "needs_skip_tests": buildbot.plugins.util.Property( | ||||
|                         "needs_skip_tests" | ||||
|                     ), | ||||
|                     "platform_architectures": buildbot.plugins.util.Property( | ||||
|                         "platform_architectures" | ||||
|                     ), | ||||
|                 } | ||||
|                 if pipeline_type == "patch": | ||||
|                     trigger_properties["patch_id"] = buildbot.plugins.util.Property( | ||||
|                         "patch_id" | ||||
|                     ) | ||||
|                     trigger_properties["revision"] = buildbot.plugins.util.Property( | ||||
|                         "revision" | ||||
|                     ) | ||||
|                     trigger_properties["build_configuration"] = ( | ||||
|                         buildbot.plugins.util.Property("build_configuration") | ||||
|                     ) | ||||
|                     trigger_factory.addStep( | ||||
|                         plugins_steps.SetProperties( | ||||
|                             name="get-revision", | ||||
|                             properties=gitea.blender.get_patch_revision, | ||||
|                         ) | ||||
|                     ) | ||||
|                 elif pipeline_type == "experimental": | ||||
|                     trigger_properties["override_branch_id"] = ( | ||||
|                         buildbot.plugins.util.Property("override_branch_id") | ||||
|                     ) | ||||
|                     trigger_properties["revision"] = buildbot.plugins.util.Property( | ||||
|                         "revision" | ||||
|                     ) | ||||
|                     trigger_properties["build_configuration"] = ( | ||||
|                         buildbot.plugins.util.Property("build_configuration") | ||||
|                     ) | ||||
|                     trigger_factory.addStep( | ||||
|                         plugins_steps.SetProperties( | ||||
|                             name="get-revision", | ||||
|                             properties=gitea.blender.get_branch_revision, | ||||
|                         ) | ||||
|                     ) | ||||
| 
 | ||||
|                 trigger_factory.addStep( | ||||
|                     PlatformTrigger( | ||||
|                         schedulerNames=triggerable_scheduler_names, | ||||
|                         waitForFinish=True, | ||||
|                         updateSourceStamp=False, | ||||
|                         set_properties=trigger_properties, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|                 coordinator_builder_name = ( | ||||
|                     f"{track_id}-code-{pipeline_type}-coordinator" | ||||
|                 ) | ||||
|                 builder_tags = coordinator_builder_name.split("-") | ||||
| 
 | ||||
|                 builders += [ | ||||
|                     buildbot.plugins.util.BuilderConfig( | ||||
|                         name=coordinator_builder_name, | ||||
|                         workernames=local_worker_names, | ||||
|                         tags=builder_tags, | ||||
|                         factory=trigger_factory, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|                 coordinator_scheduler_name = ( | ||||
|                     f"{track_id}-code-{pipeline_type}-coordinator-force" | ||||
|                 ) | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.ForceScheduler( | ||||
|                         name=coordinator_scheduler_name, | ||||
|                         buttonName=f"Trigger {pipeline_type} build", | ||||
|                         builderNames=[coordinator_builder_name], | ||||
|                         codebases=[ | ||||
|                             buildbot.plugins.util.CodebaseParameter( | ||||
|                                 codebase="blender.git", | ||||
|                                 project="blender.git", | ||||
|                                 branch=code_tracked_branch_ids[track_id], | ||||
|                                 hide=True, | ||||
|                             ) | ||||
|                         ], | ||||
|                         properties=track_properties[track_id] | ||||
|                         + scheduler_properties[f"code-{pipeline_type}"], | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|                 # Daily scheduler. | ||||
|                 if pipeline_type == "daily": | ||||
|                     print(f"Adding [{pipeline_type}] schedulers") | ||||
|                     if needs_incremental_schedulers and (track_id in code_track_ids): | ||||
|                         incremental_scheduler_name = ( | ||||
|                             f"{track_id}-code-{pipeline_type}-coordinator-incremental" | ||||
|                         ) | ||||
|                         incremental_scheduler_properties = { | ||||
|                             "revision": "HEAD", | ||||
|                             "python_module": False, | ||||
|                             "needs_skip_tests": False, | ||||
|                             "needs_package_delivery": False, | ||||
|                             "needs_gpu_binaries": False, | ||||
|                             "build_configuration": "release", | ||||
|                             "platform_architectures": code_official_platform_architectures[ | ||||
|                                 track_id | ||||
|                             ], | ||||
|                         } | ||||
| 
 | ||||
|                         change_filter = buildbot.plugins.util.ChangeFilter( | ||||
|                             project=["blender.git"], | ||||
|                             branch=code_tracked_branch_ids[track_id], | ||||
|                         ) | ||||
|                         schedulers += [ | ||||
|                             plugins_schedulers.SingleBranchScheduler( | ||||
|                                 name=incremental_scheduler_name, | ||||
|                                 builderNames=[coordinator_builder_name], | ||||
|                                 change_filter=change_filter, | ||||
|                                 properties=incremental_scheduler_properties, | ||||
|                                 treeStableTimer=tree_stable_timer_in_seconds, | ||||
|                             ) | ||||
|                         ] | ||||
| 
 | ||||
|                     if needs_nightly_schedulers and (track_id in code_track_ids): | ||||
|                         nightly_scheduler_name = ( | ||||
|                             f"{track_id}-code-{pipeline_type}-coordinator-nightly" | ||||
|                         ) | ||||
|                         nightly_properties = { | ||||
|                             "revision": "HEAD", | ||||
|                             "python_module": False, | ||||
|                             "needs_skip_tests": False, | ||||
|                             "needs_package_delivery": True, | ||||
|                             "needs_gpu_binaries": True, | ||||
|                             "build_configuration": "release", | ||||
|                             "platform_architectures": code_all_platform_architectures[ | ||||
|                                 track_id | ||||
|                             ], | ||||
|                         } | ||||
|                         nightly_codebases = { | ||||
|                             "blender.git": { | ||||
|                                 "repository": "", | ||||
|                                 "branch": code_tracked_branch_ids[track_id], | ||||
|                                 "revision": None, | ||||
|                             } | ||||
|                         } | ||||
|                         schedulers += [ | ||||
|                             plugins_schedulers.Nightly( | ||||
|                                 name=nightly_scheduler_name, | ||||
|                                 builderNames=[coordinator_builder_name], | ||||
|                                 codebases=nightly_codebases, | ||||
|                                 properties=nightly_properties, | ||||
|                                 onlyIfChanged=False, | ||||
|                                 hour=1, | ||||
|                                 minute=30, | ||||
|                             ) | ||||
|                         ] | ||||
| 
 | ||||
|     return builders, schedulers | ||||
config/pipeline/code_benchmark.py (new file)
|  | @ -0,0 +1,94 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| from functools import partial | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| from buildbot.plugins import steps as plugins_steps | ||||
| 
 | ||||
| import conf.branches | ||||
| import conf.worker | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| # Custom file upload that shows links to download files. | ||||
| class LinkMultipleFileUpload(plugins_steps.MultipleFileUpload): | ||||
|     def uploadDone(self, result, source, masterdest): | ||||
|         if not self.url: | ||||
|             return | ||||
| 
 | ||||
|         name = pathlib.Path(source).name | ||||
|         self.addURL(name, self.url + "/" + name + "/report.html") | ||||
| 
 | ||||
|     def allUploadsDone(self, result, sources, masterdest): | ||||
|         return | ||||
| 
 | ||||
| 
 | ||||
| def create_deliver_step(ENVIRONMENT): | ||||
|     worker_config = conf.worker.get_config(ENVIRONMENT) | ||||
| 
 | ||||
|     max_file_size = 500 * 1024 * 1024  # 500 MB | ||||
|     worker_source_path = pathlib.Path("../../../../git/blender-vdev/build_package") | ||||
|     master_dest_path = worker_config.buildbot_download_folder / "daily" / "benchmarks" | ||||
| 
 | ||||
|     return LinkMultipleFileUpload( | ||||
|         name="deliver", | ||||
|         maxsize=max_file_size, | ||||
|         workdir=f"{worker_source_path}", | ||||
|         glob=True, | ||||
|         workersrcs=["main-*"], | ||||
|         masterdest=f"{master_dest_path}", | ||||
|         mode=0o644, | ||||
|         url="../download/daily/benchmarks", | ||||
|         description="running", | ||||
|         descriptionDone="completed", | ||||
|         alwaysRun=True, | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.StringParameter( | ||||
|             name="commit_id", | ||||
|             label="Commit:", | ||||
|             required=True, | ||||
|             size=80, | ||||
|             default="HEAD", | ||||
|         ), | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_gpu_binaries", | ||||
|             label="GPU binaries -> build Cycles GPU kernels", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=True, | ||||
|             hide=True, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "code-benchmark", | ||||
|         "code_benchmark.py", | ||||
|         [ | ||||
|             "configure-machine", | ||||
|             "update-code", | ||||
|             "compile-code", | ||||
|             "compile-gpu", | ||||
|             "compile-install", | ||||
|             "benchmark", | ||||
|             partial(create_deliver_step, ENVIRONMENT), | ||||
|             "clean", | ||||
|         ], | ||||
|         {"vdev": "main"}, | ||||
|         properties, | ||||
|         "blender.git", | ||||
|         ["linux-x86_64-code-gpu", "darwin-arm64-code-gpu"], | ||||
|         # Compile GPU step needs a long timeout. | ||||
|         default_step_timeout_in_seconds=90 * 60, | ||||
|         variations=["linux", "darwin"], | ||||
|         nightly_properties={"commit_id": "HEAD", "needs_gpu_binaries": True}, | ||||
|         hour=7, | ||||
|         minute=30, | ||||
|     ) | ||||
config/pipeline/code_bpy_deploy.py (new file)
|  | @ -0,0 +1,30 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| # Builders for deploying Python module releases to PyPI. | ||||
| 
 | ||||
| 
 | ||||
| import conf.branches | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "code-bpy-deploy", | ||||
|         "code_bpy_deploy.py", | ||||
|         [ | ||||
|             "configure-machine", | ||||
|             "update-code", | ||||
|             "pull", | ||||
|             "deliver-pypi", | ||||
|             "clean", | ||||
|         ], | ||||
|         conf.branches.code_deploy_track_ids, | ||||
|         properties, | ||||
|         "blender.git", | ||||
|         ["linux-x86_64-general"], | ||||
|     ) | ||||
config/pipeline/code_deploy.py (new file)
|  | @ -0,0 +1,43 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| # Builders for deploying Blender releases. | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| import conf.branches | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_full_clean", | ||||
|             label="Full clean -> removes build workspace on machine", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=False, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "code-artifacts-deploy", | ||||
|         "code_deploy.py", | ||||
|         [ | ||||
|             "configure-machine", | ||||
|             "update-code", | ||||
|             "package-source", | ||||
|             "pull-artifacts", | ||||
|             "repackage-artifacts", | ||||
|             "deploy-artifacts", | ||||
|             "monitor-artifacts", | ||||
|             "clean", | ||||
|         ], | ||||
|         conf.branches.code_deploy_track_ids, | ||||
|         properties, | ||||
|         "blender.git", | ||||
|         ["linux-x86_64-general"], | ||||
|         default_step_timeout_in_seconds=30 * 60, | ||||
|     ) | ||||
config/pipeline/code_store.py (new file)
|  | @ -0,0 +1,243 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| # Builders for releasing Blender to stores. | ||||
| 
 | ||||
| import pathlib | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| from buildbot.plugins import steps as plugins_steps | ||||
| from buildbot.plugins import schedulers as plugins_schedulers | ||||
| 
 | ||||
| import conf.branches | ||||
| import conf.worker | ||||
| import pipeline.common | ||||
| 
 | ||||
| # Timeouts. | ||||
| default_step_timeout_in_seconds = 60 * 60 | ||||
| 
 | ||||
| # Tracks. | ||||
| track_ids = conf.branches.code_store_track_ids | ||||
| tracked_branch_ids = {} | ||||
| for track_id in track_ids: | ||||
|     tracked_branch_ids[track_id] = conf.branches.code_tracked_branch_ids[track_id] | ||||
| 
 | ||||
| # Properties. | ||||
| scheduler_properties = [ | ||||
|     buildbot.plugins.util.ChoiceStringParameter( | ||||
|         name="store_id", | ||||
|         label="Store:", | ||||
|         required=True, | ||||
|         choices=["snap", "steam", "windows"], | ||||
|         multiple=True, | ||||
|         strict=True, | ||||
|         default=["snap", "steam", "windows"], | ||||
|     ), | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def create_deliver_binaries_windows_step(worker_config, track_id, pipeline_type): | ||||
|     # Create step for uploading msix to download.blender.org. | ||||
|     max_file_size = 500 * 1024 * 1024  # 500 MB | ||||
|     worker_source_path = pathlib.Path( | ||||
|         f"../../../../git/blender-{track_id}/build_package" | ||||
|     ) | ||||
|     master_dest_path = pathlib.Path( | ||||
|         f"{worker_config.buildbot_download_folder}/{pipeline_type}" | ||||
|     ).expanduser() | ||||
| 
 | ||||
|     return plugins_steps.MultipleFileUpload( | ||||
|         name="deliver-binaries", | ||||
|         maxsize=max_file_size, | ||||
|         workdir=f"{worker_source_path}", | ||||
|         glob=True, | ||||
|         workersrcs=["*.msix*"], | ||||
|         masterdest=f"{master_dest_path}", | ||||
|         mode=0o644, | ||||
|         url=None, | ||||
|         description="running", | ||||
|         descriptionDone="completed", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     builders = [] | ||||
|     schedulers = [] | ||||
| 
 | ||||
|     platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) | ||||
|     local_worker_names = conf.machines.fetch_local_worker_names() | ||||
| 
 | ||||
|     worker_config = conf.worker.get_config(ENVIRONMENT) | ||||
| 
 | ||||
|     needs_nightly_schedulers = ENVIRONMENT == "PROD" | ||||
| 
 | ||||
|     pipeline_type = "daily" | ||||
| 
 | ||||
|     store_ids = ["steam", "snap", "windows"] | ||||
| 
 | ||||
|     print("*** Creating [code] [store] pipeline") | ||||
|     for track_id in track_ids: | ||||
|         triggerable_scheduler_names = [] | ||||
|         trigger_factory = buildbot.plugins.util.BuildFactory() | ||||
| 
 | ||||
|         for store_id in store_ids: | ||||
|             # Create build steps. | ||||
|             pipeline_build_factory = buildbot.plugins.util.BuildFactory() | ||||
|             step_names = [ | ||||
|                 "configure-machine", | ||||
|                 "update-code", | ||||
|                 "pull-artifacts", | ||||
|                 "package", | ||||
|             ] | ||||
| 
 | ||||
|             if store_id == "windows": | ||||
|                 step_names += ["deliver-binaries"] | ||||
|             else: | ||||
|                 step_names += ["deliver"] | ||||
| 
 | ||||
|             step_names += ["clean"] | ||||
| 
 | ||||
|             print(f"Creating [{track_id}] [code] [store] [{store_id}] pipeline steps") | ||||
|             for step_name in step_names: | ||||
|                 if step_name == "deliver-binaries": | ||||
|                     step = create_deliver_binaries_windows_step( | ||||
|                         worker_config, track_id, pipeline_type | ||||
|                     ) | ||||
|                 else: | ||||
|                     args = ["--store-id", store_id, step_name] | ||||
|                     step_command = pipeline.common.create_worker_command( | ||||
|                         "code_store.py", ENVIRONMENT, track_id, args | ||||
|                     ) | ||||
| 
 | ||||
|                     step = plugins_steps.ShellCommand( | ||||
|                         name=step_name, | ||||
|                         logEnviron=True, | ||||
|                         haltOnFailure=True, | ||||
|                         timeout=default_step_timeout_in_seconds, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                         command=step_command, | ||||
|                     ) | ||||
| 
 | ||||
|                 pipeline_build_factory.addStep(step) | ||||
| 
 | ||||
|             for master_step_name in pipeline.common.code_pipeline_master_step_names: | ||||
|                 master_step_command = ( | ||||
|                     pipeline.common.create_master_command_args.withArgs( | ||||
|                         ENVIRONMENT, | ||||
|                         track_id, | ||||
|                         pipeline_type, | ||||
|                         master_step_name, | ||||
|                         single_platform=False, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|             # Master-side step to archive and purge builds. | ||||
|                 pipeline_build_factory.addStep( | ||||
|                     plugins_steps.MasterShellCommand( | ||||
|                         name=master_step_name, | ||||
|                         logEnviron=False, | ||||
|                         command=master_step_command, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|             # Create builders. | ||||
|             worker_group_id = ( | ||||
|                 f"windows-amd64-store-{store_id}" | ||||
|                 if store_id == "windows" | ||||
|                 else f"linux-x86_64-store-{store_id}" | ||||
|             ) | ||||
|             pipeline_worker_names = platform_worker_names[worker_group_id] | ||||
|             if pipeline_worker_names: | ||||
|                 pipeline_builder_name = f"{track_id}-code-store-{store_id}" | ||||
| 
 | ||||
|                 builder_tags = pipeline_builder_name.split("-") | ||||
| 
 | ||||
|                 builders += [ | ||||
|                     buildbot.plugins.util.BuilderConfig( | ||||
|                         name=pipeline_builder_name, | ||||
|                         workernames=pipeline_worker_names, | ||||
|                         tags=builder_tags, | ||||
|                         factory=pipeline_build_factory, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|                 scheduler_name = f"{track_id}-code-store-{store_id}-triggerable" | ||||
|                 triggerable_scheduler_names += [scheduler_name] | ||||
| 
 | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.Triggerable( | ||||
|                         name=scheduler_name, builderNames=[pipeline_builder_name] | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|         # Create coordinator. | ||||
|         if triggerable_scheduler_names: | ||||
|             trigger_properties = {} | ||||
|             trigger_factory.addStep( | ||||
|                 plugins_steps.Trigger( | ||||
|                     schedulerNames=triggerable_scheduler_names, | ||||
|                     waitForFinish=True, | ||||
|                     updateSourceStamp=False, | ||||
|                     set_properties=trigger_properties, | ||||
|                     description="running", | ||||
|                     descriptionDone="completed", | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             coordinator_builder_name = f"{track_id}-code-store-coordinator" | ||||
|             builder_tags = coordinator_builder_name.split("-") | ||||
| 
 | ||||
|             builders += [ | ||||
|                 buildbot.plugins.util.BuilderConfig( | ||||
|                     name=coordinator_builder_name, | ||||
|                     workernames=local_worker_names, | ||||
|                     tags=builder_tags, | ||||
|                     factory=trigger_factory, | ||||
|                 ) | ||||
|             ] | ||||
| 
 | ||||
|             coordinator_scheduler_name = f"{track_id}-code-store-coordinator-force" | ||||
|             schedulers += [ | ||||
|                 plugins_schedulers.ForceScheduler( | ||||
|                     name=coordinator_scheduler_name, | ||||
|                     buttonName="Trigger store build", | ||||
|                     builderNames=[coordinator_builder_name], | ||||
|                     codebases=[ | ||||
|                         buildbot.plugins.util.CodebaseParameter( | ||||
|                             codebase="", revision=None, hide=True | ||||
|                         ) | ||||
|                     ], | ||||
|                     properties=scheduler_properties, | ||||
|                 ) | ||||
|             ] | ||||
| 
 | ||||
|             if needs_nightly_schedulers and (track_id in track_ids): | ||||
|                 nightly_scheduler_name = f"{track_id}-code-store-coordinator-nightly" | ||||
|                 nightly_properties = { | ||||
|                     "revision": "HEAD", | ||||
|                 } | ||||
|                 nightly_codebases = { | ||||
|                     "blender.git": { | ||||
|                         "repository": "", | ||||
|                         "branch": tracked_branch_ids[track_id], | ||||
|                         "revision": None, | ||||
|                     } | ||||
|                 } | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.Nightly( | ||||
|                         name=nightly_scheduler_name, | ||||
|                         builderNames=[coordinator_builder_name], | ||||
|                         codebases=nightly_codebases, | ||||
|                         properties=nightly_properties, | ||||
|                         onlyIfChanged=False, | ||||
|                         hour=5, | ||||
|                         minute=30, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|     return builders, schedulers | ||||
config/pipeline/common.py (new file)
|  | @ -0,0 +1,342 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| from buildbot.plugins import steps as plugins_steps | ||||
| from buildbot.plugins import schedulers as plugins_schedulers | ||||
| 
 | ||||
| import conf.machines | ||||
| 
 | ||||
| devops_git_root_path = "~/git" | ||||
| 
 | ||||
| # Steps that run on the buildbot master. | ||||
| code_pipeline_master_step_names = [ | ||||
|     "deduplicate-binaries", | ||||
|     "purge-binaries", | ||||
| ] | ||||
| 
 | ||||
| 
 | ||||
| def fetch_property(props, key, default=None): | ||||
|     value = default | ||||
|     if key in props: | ||||
|         value = props[key] | ||||
| 
 | ||||
|     return value | ||||
| 
 | ||||
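| # Example: with props = {"revision": "abc123"} (values are illustrative), | ||||
| #   fetch_property(props, key="revision", default="HEAD") -> "abc123" | ||||
| #   fetch_property(props, key="patch_id", default="") -> "" | ||||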
| 
 | ||||
| def fetch_platform_architecture(props): | ||||
|     platform_architectures = fetch_property(props, key="platform_architectures") | ||||
| 
 | ||||
|     # Find the platform arch for this builder | ||||
|     buildername = fetch_property(props, key="buildername") | ||||
|     builder_platform_architecture = "-".join(buildername.split("-")[-2:]) | ||||
| 
 | ||||
|     found_platform_architecture = None | ||||
|     if platform_architectures: | ||||
|         for platform_architecture in platform_architectures: | ||||
|             if platform_architecture in builder_platform_architecture: | ||||
|                 found_platform_architecture = platform_architecture | ||||
|                 break | ||||
| 
 | ||||
|     if found_platform_architecture: | ||||
|         return found_platform_architecture.split("-") | ||||
|     else: | ||||
|         return None, None | ||||
| 
 | ||||
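| # Example (builder name is an assumption): for buildername | ||||
| # "v42-code-patch-linux-x86_64" and platform_architectures containing | ||||
| # "linux-x86_64", this returns ["linux", "x86_64"], unpacked by callers as | ||||
| # platform_id and architecture. | ||||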
| 
 | ||||
| def always_do_step(step): | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| def needs_do_doc_pipeline_step(step): | ||||
|     if "package" in step.name or "deliver" in step.name: | ||||
|         return step.getProperty("needs_package_delivery") | ||||
|     else: | ||||
|         return True | ||||
| 
 | ||||
| 
 | ||||
| def create_worker_command(script, ENVIRONMENT, track_id, args): | ||||
|     # This relative path assumes we are in: | ||||
|     # ~/.devops/services/buildbot-worker/<builder-name>/build | ||||
|     # There appears to be no way to expand a tilde here? | ||||
|     # | ||||
|     # This is assumed to run within the buildbot worker pipenv, | ||||
|     # so the python command should match the python version and | ||||
|     # available packages. | ||||
|     cmd = [ | ||||
|         "python", | ||||
|         f"../../../../../git/blender-devops/buildbot/worker/{script}", | ||||
|         "--track-id", | ||||
|         track_id, | ||||
|         "--service-env-id", | ||||
|         ENVIRONMENT, | ||||
|     ] | ||||
| 
 | ||||
|     return cmd + list(args) | ||||
| 
 | ||||
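| # For example (values are illustrative): | ||||
| #   create_worker_command("code.py", "LOCAL", "vdev", ["compile-code"]) | ||||
| # returns | ||||
| #   ["python", "../../../../../git/blender-devops/buildbot/worker/code.py", | ||||
| #    "--track-id", "vdev", "--service-env-id", "LOCAL", "compile-code"] | ||||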
| 
 | ||||
| @buildbot.plugins.util.renderer | ||||
| def create_master_command_args( | ||||
|     props, ENVIRONMENT, track_id, pipeline_type, step_name, single_platform | ||||
| ): | ||||
|     build_configuration = fetch_property( | ||||
|         props, key="build_configuration", default="release" | ||||
|     ) | ||||
|     python_module = fetch_property(props, key="python_module", default=False) | ||||
| 
 | ||||
|     args = [ | ||||
|         "--pipeline-type", | ||||
|         pipeline_type, | ||||
|         "--build-configuration", | ||||
|         build_configuration, | ||||
|     ] | ||||
| 
 | ||||
|     if single_platform: | ||||
|         # Archive binaries for a single platform and architecture only. | ||||
|         platform_id, architecture = fetch_platform_architecture(props) | ||||
|         args += ["--platform-id", platform_id, "--architecture", architecture] | ||||
| 
 | ||||
|     if python_module: | ||||
|         args += ["--python-module"] | ||||
| 
 | ||||
|     args += [step_name] | ||||
| 
 | ||||
|     # This relative path assumes we are in: | ||||
|     # ~/.devops/services/buildbot-master | ||||
|     # There appears to be no way to expand a tilde here? | ||||
|     # | ||||
|     # This is assumed to run within the buildbot master pipenv, | ||||
|     # so the python command should match the python version and | ||||
|     # available packages. | ||||
|     cmd = [ | ||||
|         "python", | ||||
|         "../../../git/blender-devops/buildbot/worker/archive.py", | ||||
|         "--track-id", | ||||
|         track_id, | ||||
|         "--service-env-id", | ||||
|         ENVIRONMENT, | ||||
|     ] | ||||
| 
 | ||||
|     return cmd + list(args) | ||||
| 
 | ||||
| 
 | ||||
| @buildbot.plugins.util.renderer | ||||
| def create_pipeline_worker_command( | ||||
|     props, | ||||
|     ENVIRONMENT, | ||||
|     track_id, | ||||
|     script, | ||||
|     step_name, | ||||
|     variation_property, | ||||
|     variation, | ||||
|     builder_properties, | ||||
| ): | ||||
|     args = [step_name] | ||||
| 
 | ||||
|     if variation_property: | ||||
|         args += ["--" + variation_property.replace("_", "-"), variation] | ||||
| 
 | ||||
|     for builder_prop in builder_properties: | ||||
|         if builder_prop.name in props: | ||||
|             prop_value = props[builder_prop.name] | ||||
|         else: | ||||
|             prop_value = builder_prop.default | ||||
| 
 | ||||
|         argument_name = "--" + builder_prop.name.replace("_", "-") | ||||
|         if isinstance(builder_prop, buildbot.plugins.util.BooleanParameter): | ||||
|             if prop_value in ["true", True]: | ||||
|                 args += [argument_name] | ||||
|         else: | ||||
|             args += [argument_name, prop_value] | ||||
| 
 | ||||
|     if "revision" in props and props["revision"]: | ||||
|         args += ["--commit-id", props["revision"]] | ||||
| 
 | ||||
|     return create_worker_command(script, ENVIRONMENT, track_id, args) | ||||
| 
 | ||||
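| # Sketch of the property-to-argument mapping above (values are illustrative): | ||||
| #   a BooleanParameter such as "needs_full_clean", when true, contributes | ||||
| #   "--needs-full-clean"; a StringParameter such as "commit_id" set to | ||||
| #   "abc123" contributes "--commit-id abc123". | ||||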
| 
 | ||||
| def create_pipeline( | ||||
|     ENVIRONMENT, | ||||
|     artifact_id, | ||||
|     script, | ||||
|     steps, | ||||
|     tracked_branch_ids, | ||||
|     properties, | ||||
|     codebase, | ||||
|     worker_group_ids, | ||||
|     variation_property=None, | ||||
|     variations=[""], | ||||
|     incremental_properties=None, | ||||
|     nightly_properties=None, | ||||
|     do_step_if=always_do_step, | ||||
|     default_step_timeout_in_seconds=600, | ||||
|     tree_stable_timer_in_seconds=180, | ||||
|     hour=5, | ||||
|     minute=0, | ||||
| ): | ||||
|     builders = [] | ||||
|     schedulers = [] | ||||
| 
 | ||||
|     platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) | ||||
|     local_worker_names = conf.machines.fetch_local_worker_names() | ||||
| 
 | ||||
|     needs_incremental_schedulers = ( | ||||
|         incremental_properties is not None and ENVIRONMENT in ["PROD"] | ||||
|     ) | ||||
|     needs_nightly_schedulers = nightly_properties is not None and ENVIRONMENT in [ | ||||
|         "PROD" | ||||
|     ] | ||||
|     track_ids = tracked_branch_ids.keys() | ||||
| 
 | ||||
|     print(f"*** Creating [{artifact_id}] pipeline") | ||||
|     for track_id in track_ids: | ||||
|         triggerable_scheduler_names = [] | ||||
|         trigger_factory = buildbot.plugins.util.BuildFactory() | ||||
| 
 | ||||
|         for worker_group_id, variation in zip(worker_group_ids, variations): | ||||
|             if variation: | ||||
|                 pipeline_builder_name = f"{track_id}-{artifact_id}-{variation}" | ||||
|             else: | ||||
|                 pipeline_builder_name = f"{track_id}-{artifact_id}" | ||||
| 
 | ||||
|             pipeline_build_factory = buildbot.plugins.util.BuildFactory() | ||||
| 
 | ||||
|             print(f"Creating [{pipeline_builder_name}] pipeline steps") | ||||
|             for step in steps: | ||||
|                 if callable(step): | ||||
|                     pipeline_build_factory.addStep(step()) | ||||
|                     continue | ||||
| 
 | ||||
|                 step_command = create_pipeline_worker_command.withArgs( | ||||
|                     ENVIRONMENT, | ||||
|                     track_id, | ||||
|                     script, | ||||
|                     step, | ||||
|                     variation_property, | ||||
|                     variation, | ||||
|                     properties, | ||||
|                 ) | ||||
| 
 | ||||
|                 pipeline_build_factory.addStep( | ||||
|                     plugins_steps.ShellCommand( | ||||
|                         name=step, | ||||
|                         logEnviron=True, | ||||
|                         haltOnFailure=True, | ||||
|                         timeout=default_step_timeout_in_seconds, | ||||
|                         description="running", | ||||
|                         descriptionDone="completed", | ||||
|                         command=step_command, | ||||
|                         doStepIf=do_step_if, | ||||
|                     ) | ||||
|                 ) | ||||
| 
 | ||||
|             # Create builder. | ||||
|             pipeline_worker_names = platform_worker_names[worker_group_id] | ||||
|             if pipeline_worker_names: | ||||
|                 builder_tags = pipeline_builder_name.split("-") | ||||
| 
 | ||||
|                 builders += [ | ||||
|                     buildbot.plugins.util.BuilderConfig( | ||||
|                         name=pipeline_builder_name, | ||||
|                         workernames=pipeline_worker_names, | ||||
|                         tags=builder_tags, | ||||
|                         factory=pipeline_build_factory, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|                 scheduler_name = f"{pipeline_builder_name}-triggerable" | ||||
|                 triggerable_scheduler_names += [scheduler_name] | ||||
| 
 | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.Triggerable( | ||||
|                         name=scheduler_name, builderNames=[pipeline_builder_name] | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|         # Only create a coordinator scheduler if we have something to trigger. | ||||
|         if triggerable_scheduler_names: | ||||
|             trigger_properties = {} | ||||
|             for prop in properties: | ||||
|                 if prop.name != variation_property: | ||||
|                     trigger_properties[prop.name] = buildbot.plugins.util.Property( | ||||
|                         prop.name | ||||
|                     ) | ||||
| 
 | ||||
|             trigger_factory.addStep( | ||||
|                 plugins_steps.Trigger( | ||||
|                     schedulerNames=triggerable_scheduler_names, | ||||
|                     waitForFinish=True, | ||||
|                     updateSourceStamp=False, | ||||
|                     set_properties=trigger_properties, | ||||
|                     description="running", | ||||
|                     descriptionDone="completed", | ||||
|                 ) | ||||
|             ) | ||||
| 
 | ||||
|             coordinator_builder_name = f"{track_id}-{artifact_id}-coordinator" | ||||
|             builder_tags = coordinator_builder_name.split("-") | ||||
|             builders += [ | ||||
|                 buildbot.plugins.util.BuilderConfig( | ||||
|                     name=coordinator_builder_name, | ||||
|                     workernames=local_worker_names, | ||||
|                     tags=builder_tags, | ||||
|                     factory=trigger_factory, | ||||
|                 ) | ||||
|             ] | ||||
| 
 | ||||
|             coordinator_scheduler_name = f"{track_id}-{artifact_id}-coordinator-force" | ||||
|             schedulers += [ | ||||
|                 plugins_schedulers.ForceScheduler( | ||||
|                     name=coordinator_scheduler_name, | ||||
|                     buttonName="Trigger build", | ||||
|                     builderNames=[coordinator_builder_name], | ||||
|                     codebases=[ | ||||
|                         buildbot.plugins.util.CodebaseParameter( | ||||
|                             codebase="", revision=None, hide=True | ||||
|                         ) | ||||
|                     ], | ||||
|                     properties=properties, | ||||
|                 ) | ||||
|             ] | ||||
| 
 | ||||
|             if needs_incremental_schedulers and (track_id in track_ids): | ||||
|                 incremental_scheduler_name = ( | ||||
|                     f"{track_id}-{artifact_id}-coordinator-incremental" | ||||
|                 ) | ||||
|                 change_filter = buildbot.plugins.util.ChangeFilter( | ||||
|                     project=[codebase], branch=tracked_branch_ids[track_id] | ||||
|                 ) | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.SingleBranchScheduler( | ||||
|                         name=incremental_scheduler_name, | ||||
|                         builderNames=[coordinator_builder_name], | ||||
|                         change_filter=change_filter, | ||||
|                         properties=incremental_properties, | ||||
|                         treeStableTimer=tree_stable_timer_in_seconds, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|             if needs_nightly_schedulers and (track_id in track_ids): | ||||
|                 nightly_codebases = { | ||||
|                     codebase: { | ||||
|                         "repository": "", | ||||
|                         "branch": tracked_branch_ids[track_id], | ||||
|                         "revision": None, | ||||
|                     } | ||||
|                 } | ||||
|                 nightly_scheduler_name = f"{track_id}-{artifact_id}-coordinator-nightly" | ||||
|                 schedulers += [ | ||||
|                     plugins_schedulers.Nightly( | ||||
|                         name=nightly_scheduler_name, | ||||
|                         builderNames=[coordinator_builder_name], | ||||
|                         codebases=nightly_codebases, | ||||
|                         properties=nightly_properties, | ||||
|                         onlyIfChanged=False, | ||||
|                         hour=hour, | ||||
|                         minute=minute, | ||||
|                     ) | ||||
|                 ] | ||||
| 
 | ||||
|     return builders, schedulers | ||||
							
								
								
									
54 config/pipeline/doc_api.py Normal file
|  | @ -0,0 +1,54 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| import conf.branches | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_full_clean", | ||||
|             label="Full clean -> removes build workspace on machine", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=False, | ||||
|         ), | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_package_delivery", | ||||
|             label="Package delivery -> push build to configured services", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=False, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "doc-api", | ||||
|         "doc_api.py", | ||||
|         [ | ||||
|             "configure-machine", | ||||
|             "update-code", | ||||
|             "compile-code", | ||||
|             "compile-install", | ||||
|             "compile", | ||||
|             "package", | ||||
|             "deliver", | ||||
|             "clean", | ||||
|         ], | ||||
|         conf.branches.code_tracked_branch_ids, | ||||
|         properties, | ||||
|         "blender.git", | ||||
|         ["linux-x86_64-general"], | ||||
|         variations=["html"], | ||||
|         incremental_properties={"needs_package_delivery": False}, | ||||
|         nightly_properties={"needs_package_delivery": True}, | ||||
|         tree_stable_timer_in_seconds=15 * 60, | ||||
|         do_step_if=pipeline.common.needs_do_doc_pipeline_step, | ||||
|         hour=1, | ||||
|         minute=30, | ||||
|     ) | ||||
							
								
								
									
32 config/pipeline/doc_developer.py Normal file
|  | @ -0,0 +1,32 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_package_delivery", | ||||
|             label="Package delivery -> push build to configured services", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=True, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "doc-developer", | ||||
|         "doc_developer.py", | ||||
|         ["update", "compile", "deliver"], | ||||
|         {"vdev": "main"}, | ||||
|         properties, | ||||
|         "blender-developer-docs.git", | ||||
|         ["linux-x86_64-general"], | ||||
|         incremental_properties={"needs_package_delivery": True}, | ||||
|         do_step_if=pipeline.common.needs_do_doc_pipeline_step, | ||||
|     ) | ||||
							
								
								
									
47 config/pipeline/doc_manual.py Normal file
|  | @ -0,0 +1,47 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| import conf.branches | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_package_delivery", | ||||
|             label="Package delivery -> push build to configured services", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=True, | ||||
|         ), | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_all_locales", | ||||
|             label="All locales -> process all configure locales", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=False, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "doc-manual", | ||||
|         "doc_manual.py", | ||||
|         ["configure-machine", "update", "compile", "package", "deliver", "clean"], | ||||
|         conf.branches.code_tracked_branch_ids, | ||||
|         properties, | ||||
|         "blender-manual.git", | ||||
|         ["linux-x86_64-general", "linux-x86_64-general"], | ||||
|         variation_property="doc_format", | ||||
|         variations=["html", "epub"], | ||||
|         incremental_properties={ | ||||
|             "needs_package_delivery": True, | ||||
|             "needs_all_locales": False, | ||||
|         }, | ||||
|         nightly_properties={"needs_package_delivery": True, "needs_all_locales": True}, | ||||
|         tree_stable_timer_in_seconds=15 * 60, | ||||
|         do_step_if=pipeline.common.needs_do_doc_pipeline_step, | ||||
|     ) | ||||
							
								
								
									
32 config/pipeline/doc_studio.py Normal file
|  | @ -0,0 +1,32 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import buildbot.plugins | ||||
| 
 | ||||
| import pipeline.common | ||||
| 
 | ||||
| 
 | ||||
| def populate(ENVIRONMENT): | ||||
|     properties = [ | ||||
|         buildbot.plugins.util.BooleanParameter( | ||||
|             name="needs_package_delivery", | ||||
|             label="Package delivery -> push build to configured services", | ||||
|             required=True, | ||||
|             strict=True, | ||||
|             default=True, | ||||
|         ), | ||||
|     ] | ||||
| 
 | ||||
|     return pipeline.common.create_pipeline( | ||||
|         ENVIRONMENT, | ||||
|         "doc-studio-tools", | ||||
|         "doc_studio.py", | ||||
|         ["update", "compile", "deliver"], | ||||
|         {"vdev": "main"}, | ||||
|         properties, | ||||
|         "blender-studio-tools.git", | ||||
|         ["linux-x86_64-doc-studio-tools"], | ||||
|         incremental_properties={"needs_package_delivery": True}, | ||||
|         do_step_if=pipeline.common.needs_do_doc_pipeline_step, | ||||
|     ) | ||||
|  | @ -31,12 +31,11 @@ importlib.reload(conf.worker) | |||
| importlib.reload(gitea.blender) | ||||
| importlib.reload(pipeline) | ||||
| 
 | ||||
| devops_env_id = os.environ.get("DEVOPS_ENV_ID", default="LOCAL") | ||||
| devops_host_id = os.environ.get("DEVOPS_HOST_ID", default="localhost") | ||||
| ENVIRONMENT = os.environ.get("ENVIRONMENT", default="LOCAL") | ||||
| 
 | ||||
| 
 | ||||
| def setup() -> Dict[str, Any]: | ||||
|     ####### MAIN - configuration | ||||
|     ####### CONFIGURATION | ||||
|     c = {} | ||||
| 
 | ||||
|     # Change Source | ||||
|  | @ -44,7 +43,7 @@ def setup() -> Dict[str, Any]: | |||
| 
 | ||||
|     # Workers | ||||
|     print("*** Creating platform workers") | ||||
|     platform_worker_names = conf.machines.fetch_platform_worker_names(devops_env_id) | ||||
|     platform_worker_names = conf.machines.fetch_platform_worker_names(ENVIRONMENT) | ||||
|     workers: List[buildbot.plugins.worker.Worker] = [] | ||||
|     configured_worker_names = set() | ||||
|     for worker_names in platform_worker_names.values(): | ||||
|  | @ -56,7 +55,7 @@ def setup() -> Dict[str, Any]: | |||
|             workers += [ | ||||
|                 buildbot.plugins.worker.Worker( | ||||
|                     worker_name, | ||||
|                     conf.machines.get_worker_password(devops_env_id, worker_name), | ||||
|                     conf.machines.get_worker_password(ENVIRONMENT, worker_name), | ||||
|                     max_builds=1, | ||||
|                     keepalive_interval=3600, | ||||
|                 ) | ||||
|  | @ -70,7 +69,7 @@ def setup() -> Dict[str, Any]: | |||
|     c["workers"] = workers | ||||
| 
 | ||||
|     # Builders and Schedulers | ||||
|     builders, schedulers = pipeline.populate(devops_env_id) | ||||
|     builders, schedulers = pipeline.populate(ENVIRONMENT) | ||||
|     c["builders"] = builders | ||||
|     c["schedulers"] = schedulers | ||||
| 
 | ||||
|  | @ -80,7 +79,7 @@ def setup() -> Dict[str, Any]: | |||
|     # status of each build will be pushed to these targets. buildbot/reporters/*.py | ||||
|     # has a variety to choose from, like IRC bots. | ||||
| 
 | ||||
|     gitea_status_service = gitea.blender.setup_service(devops_env_id) | ||||
|     gitea_status_service = gitea.blender.setup_service(ENVIRONMENT) | ||||
|     if gitea_status_service: | ||||
|         c["services"] = [gitea_status_service] | ||||
|     else: | ||||
|  | @ -91,42 +90,33 @@ def setup() -> Dict[str, Any]: | |||
|     # the 'title' string will appear at the top of this buildbot installation's | ||||
|     # home pages (linked to the 'titleURL'). | ||||
| 
 | ||||
|     c["title"] = f"Bot - {devops_env_id}" | ||||
|     c["title"] = f"Blender Buildbot - {ENVIRONMENT}" | ||||
|     c["titleURL"] = "https://projects.blender.org" | ||||
| 
 | ||||
|     # the 'buildbotURL' string should point to the location where the buildbot's | ||||
|     # internal web server is visible. This typically uses the port number set in | ||||
|     # the 'www' entry below, but with an externally-visible host name which the | ||||
|     # buildbot cannot figure out without some help. | ||||
|     c["buildbotURL"] = f"http://{devops_host_id}:8010/" | ||||
| 
 | ||||
|     if devops_env_id != "LOCAL": | ||||
|         c["buildbotURL"] = f"http://{devops_host_id}:8000/admin/" | ||||
| 
 | ||||
|         if devops_env_id == "PROD": | ||||
|             c["buildbotURL"] = "https://builder.blender.org/admin/" | ||||
|         if devops_env_id == "UATEST": | ||||
|             c["buildbotURL"] = "https://builder.uatest.blender.org/admin/" | ||||
|     c["buildbotURL"] = os.environ.get("BUILDBOT_WEB_URL", "http://localhost:8010/") | ||||
| 
 | ||||
|     # Minimalistic config to activate new web UI | ||||
|     c["www"] = dict( | ||||
|         port=8010, plugins=dict(waterfall_view={}, console_view={}, grid_view={}) | ||||
|         port=os.environ.get("BUILDBOT_WEB_PORT", 8010), | ||||
|         plugins=dict(waterfall_view={}, console_view={}, grid_view={}), | ||||
|     ) | ||||
| 
 | ||||
|     # Database | ||||
|     if devops_env_id == "LOCAL": | ||||
|         c["db"] = {"db_url": "sqlite:///state.sqlite"} | ||||
|     else: | ||||
|         # PostgreSQL database, as recommended for production environment. | ||||
|         c["db"] = {"db_url": "postgresql://buildbot@127.0.0.1/buildbot"} | ||||
|     c["db"] = { | ||||
|         "db_url": os.environ.get("BUILDBOT_DB_URL", "sqlite://").format(**os.environ) | ||||
|     } | ||||
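|     # Illustrative only: {NAME} placeholders in BUILDBOT_DB_URL are filled in | ||||
|     # from the environment by .format(**os.environ) above, e.g. | ||||
|     # BUILDBOT_DB_URL=postgresql://{SERVICE_USER_POSTGRESQL}:{SERVICE_PASSWORD_POSTGRESQL}@localhost/buildbot | ||||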
| 
 | ||||
|     c["buildbotNetUsageData"] = None | ||||
| 
 | ||||
|     # Authentication | ||||
|     c["www"]["auth"] = conf.auth.fetch_authentication(devops_env_id) | ||||
|     c["www"]["auth"] = conf.auth.fetch_authentication(ENVIRONMENT) | ||||
| 
 | ||||
|     # Authorization | ||||
|     c["www"]["authz"] = conf.auth.fetch_authorization(devops_env_id) | ||||
|     c["www"]["authz"] = conf.auth.fetch_authorization(ENVIRONMENT) | ||||
| 
 | ||||
|     # Disable UI - does not work | ||||
|     c["www"]["plugins"] = { | ||||
|  | @ -160,8 +150,8 @@ def setup() -> Dict[str, Any]: | |||
|         r"https://projects.blender.org/\1/\2/commit/%s", | ||||
|     ) | ||||
| 
 | ||||
|     # Port for workers to connectto | ||||
|     c["protocols"] = {"pb": {"port": 9989}} | ||||
|     # Port for workers to connect to | ||||
|     c["protocols"] = {"pb": {"port": os.environ.get("BUILDBOT_WORKER_PORT", 9989)}} | ||||
| 
 | ||||
|     # Disable collapsing requests | ||||
|     c["collapseRequests"] = False | ||||
|  |  | |||
							
								
								
									
0 config/worker/__init__.py Normal file
367 config/worker/archive.py Executable file
|  | @ -0,0 +1,367 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import datetime | ||||
| import os | ||||
| import pathlib | ||||
| import random | ||||
| import re | ||||
| import sys | ||||
| import time | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| from typing import Any, Dict, List, Optional, Sequence, Union | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| package_file_pattern = re.compile( | ||||
|     r"^(?P<app_id>(blender|bpy))\-" | ||||
|     + r"(?P<version_id>[0-9]+\.[0-9]+\.[0-9]+)\-" | ||||
|     + r"(?P<risk_id>[a-z]+)\+" | ||||
|     + r"(?P<branch_id>[A-Za-z0-9_\-]+)\." | ||||
|     + r"(?P<commit_hash>[a-fA-f0-9]+)\-" | ||||
|     + r"(?P<platform_id>[A-Za-z0-9_]+)\." | ||||
|     + r"(?P<architecture>[A-Za-z0-9_]+)\-" | ||||
|     + r"(?P<build_configuration>(release|asserts|sanitizer|debug))\." | ||||
|     + r"(?P<file_extension>[A-Za-z0-9\.]+)" | ||||
| ) | ||||
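| # Illustrative filename this pattern is meant to match (field values are | ||||
| # assumptions, following the groups above): | ||||
| #   blender-4.2.0-stable+main.1ddf858-linux.x86_64-release.tar.xz | ||||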
| 
 | ||||
| pipeline_types = ["daily", "experimental", "patch"] | ||||
| platforms = ["linux", "windows", "darwin"] | ||||
| architectures = ["x86_64", "amd64", "arm64"] | ||||
| build_configurations = ["release", "asserts", "sanitizer", "debug"] | ||||
| 
 | ||||
| 
 | ||||
| class ArchiveBuilder(worker.utils.Builder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args, "blender", "blender") | ||||
|         self.pipeline_type = args.pipeline_type | ||||
|         self.platform_id = args.platform_id | ||||
|         self.architecture = args.architecture | ||||
|         self.build_configuration = args.build_configuration | ||||
|         self.python_module = args.python_module | ||||
|         self.dry_run = args.dry_run | ||||
|         self.retention_in_days = args.retention_in_days | ||||
| 
 | ||||
| 
 | ||||
| def file_age_in_days(file_path: pathlib.Path) -> float: | ||||
|     try: | ||||
|         file_path_mtime = os.path.getmtime(file_path) | ||||
|     except (FileNotFoundError, PermissionError) as e: | ||||
|         print(f"Error accessing file: {e}") | ||||
|         return 0.0 | ||||
| 
 | ||||
|     age_in_seconds = time.time() - file_path_mtime | ||||
|     return age_in_seconds / (3600.0 * 24.0) | ||||
| 
 | ||||
| 
 | ||||
| def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]: | ||||
|     matches = re.match(package_file_pattern, file_path.name) | ||||
|     if not matches: | ||||
|         return None | ||||
|     build_info: Dict[str, Union[str, float, pathlib.Path]] = dict(matches.groupdict()) | ||||
|     build_info["file_age_in_days"] = file_age_in_days(file_path) | ||||
|     build_info["file_path"] = file_path | ||||
|     return build_info | ||||
| 
 | ||||
| 
 | ||||
| def archive_build(file_path: pathlib.Path, dry_run: bool) -> None: | ||||
|     # Archive build file itself and checksum | ||||
|     checksum_file_path = file_path.parent / (file_path.name + ".sha256") | ||||
| 
 | ||||
|     for source_file_path in [file_path, checksum_file_path]: | ||||
|         if not source_file_path.exists(): | ||||
|             continue | ||||
| 
 | ||||
|         archive_path = source_file_path.parent / "archive" | ||||
|         os.makedirs(archive_path, exist_ok=True) | ||||
|         dest_file_path = archive_path / source_file_path.name | ||||
| 
 | ||||
|         worker.utils.remove_file(dest_file_path, dry_run=dry_run) | ||||
|         worker.utils.move(source_file_path, dest_file_path, dry_run=dry_run) | ||||
| 
 | ||||
| 
 | ||||
| def fetch_current_builds( | ||||
|     builder: ArchiveBuilder, | ||||
|     pipeline_type: str, | ||||
|     short_version: Optional[str] = None, | ||||
|     all_platforms: bool = False, | ||||
| ) -> Dict[Any, List[Any]]: | ||||
|     worker_config = builder.get_worker_config() | ||||
|     download_path = worker_config.buildbot_download_folder | ||||
|     pipeline_build_path = download_path / pipeline_type | ||||
| 
 | ||||
|     print(f"Fetching current builds in [{pipeline_build_path}]") | ||||
|     build_groups: Dict[Any, List[Any]] = {} | ||||
|     for file_path in pipeline_build_path.glob("*.*"): | ||||
|         if not file_path.is_file(): | ||||
|             continue | ||||
|         if file_path.name.endswith(".sha256"): | ||||
|             continue | ||||
| 
 | ||||
|         build_info = parse_build_info(file_path) | ||||
|         if not build_info: | ||||
|             continue | ||||
|         if short_version and not build_info["version_id"].startswith( | ||||
|             short_version + "." | ||||
|         ): | ||||
|             continue | ||||
| 
 | ||||
|         if not all_platforms: | ||||
|             if ( | ||||
|                 builder.architecture | ||||
|                 and build_info["architecture"] != builder.architecture | ||||
|             ): | ||||
|                 continue | ||||
|             if builder.platform_id and build_info["platform_id"] != builder.platform_id: | ||||
|                 continue | ||||
|             if ( | ||||
|                 builder.build_configuration | ||||
|                 and build_info["build_configuration"] != builder.build_configuration | ||||
|             ): | ||||
|                 continue | ||||
| 
 | ||||
|         if pipeline_type == "daily": | ||||
|             key = ( | ||||
|                 "daily", | ||||
|                 build_info["file_extension"], | ||||
|                 build_info["architecture"], | ||||
|                 build_info["platform_id"], | ||||
|             ) | ||||
|         else: | ||||
|             key = ( | ||||
|                 build_info["branch_id"], | ||||
|                 build_info["file_extension"], | ||||
|                 build_info["architecture"], | ||||
|                 build_info["platform_id"], | ||||
|             ) | ||||
| 
 | ||||
|         if key in build_groups: | ||||
|             build_groups[key].append(build_info) | ||||
|         else: | ||||
|             build_groups[key] = [build_info] | ||||
| 
 | ||||
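|     # Illustrative group keys: ("daily", "zip", "x86_64", "windows") for daily | ||||
|     # builds, (branch_id, extension, architecture, platform) otherwise. | ||||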
|     return build_groups | ||||
| 
 | ||||
| 
 | ||||
| def archive_build_group( | ||||
|     builds: Sequence[Dict], retention_in_days: int, dry_run: bool = True | ||||
| ) -> None: | ||||
|     builds = sorted(builds, key=lambda build: build["file_age_in_days"]) | ||||
| 
 | ||||
|     for i, build in enumerate(builds): | ||||
|         build_age = build["file_age_in_days"] | ||||
|         build_name = build["file_path"].name | ||||
| 
 | ||||
|         # Only keep the most recent build if there are multiple | ||||
|         if i > 0 or build_age > retention_in_days: | ||||
|             print(f"Archiving [{build_name}] (age: {build_age:.3f} days)") | ||||
|             archive_build(build["file_path"], dry_run=dry_run) | ||||
|         else: | ||||
|             print(f"Keeping [{build_name}] (age: {build_age:.3f} days)") | ||||
| 
 | ||||
| 
 | ||||
| def deduplicate(builder: ArchiveBuilder) -> None: | ||||
|     retention_in_days = builder.retention_in_days | ||||
|     dry_run = builder.dry_run | ||||
| 
 | ||||
|     # Get major.minor version to match. | ||||
|     short_version = "" | ||||
|     if builder.pipeline_type == "daily": | ||||
|         branches_config = builder.get_branches_config() | ||||
|         short_version = branches_config.track_major_minor_versions[builder.track_id] | ||||
| 
 | ||||
|         if not short_version: | ||||
|             raise Exception( | ||||
|                 f"Missing version in [{builder.pipeline_type}] builds, aborting" | ||||
|             ) | ||||
| 
 | ||||
|     build_groups = fetch_current_builds( | ||||
|         builder, builder.pipeline_type, short_version=short_version | ||||
|     ) | ||||
| 
 | ||||
|     print( | ||||
|         f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] [{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]" | ||||
|     ) | ||||
|     for key, build_group in build_groups.items(): | ||||
|         print("") | ||||
|         print("--- Group: " + str(key)) | ||||
|         archive_build_group(build_group, retention_in_days, dry_run=dry_run) | ||||
| 
 | ||||
| 
 | ||||
| def fetch_purge_builds( | ||||
|     builder: ArchiveBuilder, pipeline_type: str, folder: str | ||||
| ) -> Sequence[pathlib.Path]: | ||||
|     worker_config = builder.get_worker_config() | ||||
|     download_path = worker_config.buildbot_download_folder | ||||
|     archive_path = download_path / pipeline_type / folder | ||||
|     os.makedirs(archive_path, exist_ok=True) | ||||
| 
 | ||||
|     print(f"Fetching archived builds in [{archive_path}]") | ||||
|     builds = [] | ||||
|     for file_path in archive_path.glob("*.*"): | ||||
|         if not file_path.is_file(): | ||||
|             continue | ||||
|         if file_path.name.endswith(".sha256"): | ||||
|             continue | ||||
| 
 | ||||
|         builds.append(file_path) | ||||
| 
 | ||||
|     return builds | ||||
| 
 | ||||
| 
 | ||||
| def purge(builder: ArchiveBuilder) -> None: | ||||
|     builds_retention_in_days = builder.retention_in_days | ||||
|     tests_retention_in_days = 10 | ||||
|     dry_run = builder.dry_run | ||||
| 
 | ||||
|     for pipeline_type in pipeline_types: | ||||
|         if pipeline_type != "daily": | ||||
|             print("=" * 120) | ||||
|             print(f"Deduplicating [{pipeline_type}] builds") | ||||
|             build_groups = fetch_current_builds( | ||||
|                 builder, pipeline_type, all_platforms=True | ||||
|             ) | ||||
|             for key, build_group in build_groups.items(): | ||||
|                 print("") | ||||
|                 print("--- Group: " + str(key)) | ||||
|                 archive_build_group( | ||||
|                     build_group, builds_retention_in_days, dry_run=dry_run | ||||
|                 ) | ||||
| 
 | ||||
|         print("=" * 120) | ||||
|         print( | ||||
|             f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days" | ||||
|         ) | ||||
|         for file_path in fetch_purge_builds(builder, pipeline_type, "archive"): | ||||
|             if file_age_in_days(file_path) < builds_retention_in_days: | ||||
|                 continue | ||||
| 
 | ||||
|             age = file_age_in_days(file_path) | ||||
|             checksum_file_path = file_path.parent / (file_path.name + ".sha256") | ||||
| 
 | ||||
|             print(f"Deleting [{file_path.name}] (age: {age:.3f} days)") | ||||
|             worker.utils.remove_file(file_path, dry_run=dry_run) | ||||
|             worker.utils.remove_file(checksum_file_path, dry_run=dry_run) | ||||
| 
 | ||||
|         print("=" * 120) | ||||
|         print( | ||||
|             f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days" | ||||
|         ) | ||||
|         for file_path in fetch_purge_builds(builder, pipeline_type, "tests"): | ||||
|             if file_age_in_days(file_path) < tests_retention_in_days: | ||||
|                 continue | ||||
| 
 | ||||
|             age = file_age_in_days(file_path) | ||||
|             checksum_file_path = file_path.parent / (file_path.name + ".sha256") | ||||
| 
 | ||||
|             print(f"Deleting [{file_path.name}] (age: {age:.3f} days)") | ||||
|             worker.utils.remove_file(file_path, dry_run=dry_run) | ||||
|             worker.utils.remove_file(checksum_file_path, dry_run=dry_run) | ||||
| 
 | ||||
| 
 | ||||
| def generate_test_data(builder: ArchiveBuilder) -> None: | ||||
|     worker_config = builder.get_worker_config() | ||||
|     download_path = worker_config.buildbot_download_folder | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
|     short_version = branches_config.track_major_minor_versions[builder.track_id] | ||||
|     short_version + ".0" | ||||
| 
 | ||||
|     app_id = "bpy" if builder.python_module else "blender" | ||||
|     commit_hashes = ["1ddf858", "03a2a53"] | ||||
|     risk_ids = ["stable", "alpha"] | ||||
|     file_extensions = ["zip", "msi"] | ||||
| 
 | ||||
|     if builder.pipeline_type == "daily": | ||||
|         versions = [short_version + ".0", short_version + ".1"] | ||||
|         branches = ["main", "v50"] | ||||
|         build_configurations = ["release"] | ||||
|     elif builder.pipeline_type == "patch": | ||||
|         versions = ["5.0.0", "7.0.0"] | ||||
|         branches = ["PR123", "PR456", "PR789"] | ||||
|         build_configurations = ["release", "debug"] | ||||
|     else: | ||||
|         versions = ["4.0.0", "6.0.0"] | ||||
|         branches = ["realtime-compositor", "cycles-x"] | ||||
|         build_configurations = ["release", "debug"] | ||||
| 
 | ||||
|     pipeline_path = download_path / builder.pipeline_type | ||||
|     os.makedirs(pipeline_path, exist_ok=True) | ||||
| 
 | ||||
|     for i in range(0, 25): | ||||
|         filename = ( | ||||
|             app_id | ||||
|             + "-" | ||||
|             + random.choice(versions) | ||||
|             + "-" | ||||
|             + random.choice(risk_ids) | ||||
|             + "+" | ||||
|             + random.choice(branches) | ||||
|             + "." | ||||
|             + random.choice(commit_hashes) | ||||
|             + "-" | ||||
|             + builder.platform_id | ||||
|             + "." | ||||
|             + builder.architecture | ||||
|             + "-" | ||||
|             + random.choice(build_configurations) | ||||
|             + "." | ||||
|             + random.choice(file_extensions) | ||||
|         ) | ||||
| 
 | ||||
|         file_path = pipeline_path / filename | ||||
|         file_path.write_text("Test") | ||||
| 
 | ||||
|         checksum_file_path = file_path.parent / (file_path.name + ".sha256") | ||||
|         checksum_file_path.write_text("Test") | ||||
| 
 | ||||
|         delta = datetime.timedelta(days=365 * random.random()) | ||||
|         filetime = time.mktime((datetime.datetime.today() - delta).timetuple()) | ||||
|         os.utime(file_path, (filetime, filetime)) | ||||
|         os.utime(checksum_file_path, (filetime, filetime)) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["deduplicate-binaries"] = deduplicate | ||||
|     steps["purge-binaries"] = purge | ||||
| 
 | ||||
|     parser = worker.utils.create_argument_parser(steps=steps) | ||||
|     parser.add_argument( | ||||
|         "--pipeline-type", | ||||
|         default="daily", | ||||
|         type=str, | ||||
|         choices=pipeline_types, | ||||
|         required=False, | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--platform-id", default="", type=str, choices=platforms, required=False | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--architecture", default="", type=str, choices=architectures, required=False | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--build-configuration", | ||||
|         default="release", | ||||
|         type=str, | ||||
|         choices=build_configurations, | ||||
|         required=False, | ||||
|     ) | ||||
|     parser.add_argument("--retention-in-days", default=100, type=int, required=False) | ||||
|     parser.add_argument("--python-module", action="store_true", required=False) | ||||
|     parser.add_argument("--dry-run", action="store_true", required=False) | ||||
|     parser.add_argument("--generate-test-data", action="store_true", required=False) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = ArchiveBuilder(args) | ||||
| 
 | ||||
|     if args.generate_test_data: | ||||
|         generate_test_data(builder) | ||||
| 
 | ||||
|     builder.run(args.step, steps) | ||||
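| # Illustrative invocation (a sketch; the positional step argument is assumed | ||||
| # to be defined by worker.utils.create_argument_parser): | ||||
| #   ./archive.py --pipeline-type=daily --platform-id=linux --architecture=x86_64 --dry-run deduplicate-binaries | ||||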
							
								
								
									
199 config/worker/blender/__init__.py Normal file
|  | @ -0,0 +1,199 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import os | ||||
| import pathlib | ||||
| import re | ||||
| import subprocess | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| class CodeBuilder(worker.utils.Builder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args, "blender", "blender") | ||||
|         self.needs_full_clean = args.needs_full_clean | ||||
|         self.needs_gpu_binaries = args.needs_gpu_binaries | ||||
|         self.needs_gpu_tests = args.needs_gpu_tests | ||||
|         self.needs_ninja = True | ||||
|         self.python_module = args.python_module | ||||
|         self.build_configuration = args.build_configuration | ||||
| 
 | ||||
|         track_path: pathlib.Path = self.track_path | ||||
| 
 | ||||
|         if self.platform in {"darwin", "windows"}: | ||||
|             if len(args.architecture): | ||||
|                 self.architecture = args.architecture | ||||
| 
 | ||||
|         if self.platform == "darwin": | ||||
|             self.build_dir = ( | ||||
|                 track_path / f"build_{self.architecture}_{self.build_configuration}" | ||||
|             ) | ||||
|         else: | ||||
|             self.build_dir = track_path / f"build_{self.build_configuration}" | ||||
| 
 | ||||
|         self.blender_dir = track_path / "blender.git" | ||||
|         self.install_dir = track_path / f"install_{self.build_configuration}" | ||||
|         self.package_dir = track_path / "build_package" | ||||
|         self.build_doc_path = track_path / "build_doc_api" | ||||
| 
 | ||||
|     def clean(self): | ||||
|         worker.utils.remove_dir(self.install_dir) | ||||
|         worker.utils.remove_dir(self.package_dir) | ||||
|         worker.utils.remove_dir(self.build_doc_path) | ||||
| 
 | ||||
|     # Call command with in compiler environment. | ||||
|     def call( | ||||
|         self, cmd: worker.utils.CmdSequence, env: worker.utils.CmdEnvironment = None | ||||
|     ) -> int: | ||||
|         cmd_prefix: worker.utils.CmdList = [] | ||||
| 
 | ||||
|         if self.platform == "darwin": | ||||
|             # On macOS, override Xcode version if requested. | ||||
|             pipeline_config = self.pipeline_config() | ||||
|             xcode = pipeline_config.get("xcode", None) | ||||
|             xcode_version = xcode.get("version", None) if xcode else None | ||||
| 
 | ||||
|             if xcode_version: | ||||
|                 developer_dir = ( | ||||
|                     f"/Applications/Xcode-{xcode_version}.app/Contents/Developer" | ||||
|                 ) | ||||
|             else: | ||||
|                 developer_dir = "/Applications/Xcode.app/Contents/Developer" | ||||
| 
 | ||||
|             if ( | ||||
|                 self.service_env_id == "LOCAL" | ||||
|                 and not pathlib.Path(developer_dir).exists() | ||||
|             ): | ||||
|                 worker.utils.warning( | ||||
|                     f"Skip using non-existent {developer_dir} in LOCAL service environment" | ||||
|                 ) | ||||
|             else: | ||||
|                 cmd_prefix = ["xcrun"] | ||||
|                 env = dict(env) if env else os.environ.copy() | ||||
|                 env["DEVELOPER_DIR"] = developer_dir | ||||
| 
 | ||||
|         elif worker.utils.is_tool("scl"): | ||||
|             pipeline_config = self.pipeline_config() | ||||
|             gcc_version = pipeline_config["gcc"]["version"] | ||||
|             gcc_major_version = gcc_version.split(".")[0] | ||||
| 
 | ||||
|             # On Rocky | ||||
|             if os.path.exists("/etc/rocky-release"): | ||||
|                 # Stub to override configured GCC version, remove when blender build config is fixed | ||||
|                 gcc_major_version = "11" | ||||
|                 cmd_prefix = ["scl", "enable", f"gcc-toolset-{gcc_major_version}", "--"] | ||||
| 
 | ||||
|         return worker.utils.call(cmd_prefix + list(cmd), env=env) | ||||
| 
 | ||||
|     def pipeline_config(self) -> dict: | ||||
|         config_file_path = ( | ||||
|             self.code_path / "build_files" / "config" / "pipeline_config.json" | ||||
|         ) | ||||
|         if not config_file_path.exists(): | ||||
|             config_file_path = config_file_path.with_suffix(".yaml") | ||||
|         if not config_file_path.exists(): | ||||
|             raise Exception(f"Config file [{config_file_path}] not found, aborting") | ||||
| 
 | ||||
|         with open(config_file_path, "r") as read_file: | ||||
|             if config_file_path.suffix == ".json": | ||||
|                 import json | ||||
| 
 | ||||
|                 pipeline_config = json.load(read_file) | ||||
|             else: | ||||
|                 import yaml | ||||
| 
 | ||||
|                 pipeline_config = yaml.load(read_file, Loader=yaml.SafeLoader) | ||||
| 
 | ||||
|             return pipeline_config["buildbot"] | ||||
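|     # Illustrative config shape, inferred from the lookups above and in call() | ||||
|     # (version numbers are assumptions): | ||||
|     # {"buildbot": {"gcc": {"version": "11.2.0"}, "xcode": {"version": "15.1"}}} | ||||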
| 
 | ||||
|     def blender_command_path(self) -> pathlib.Path: | ||||
|         if self.platform == "darwin": | ||||
|             return self.install_dir / "Blender.app" / "Contents" / "macOS" / "Blender" | ||||
|         elif self.platform == "windows": | ||||
|             return self.install_dir / "blender.exe" | ||||
|         else: | ||||
|             return self.install_dir / "blender" | ||||
| 
 | ||||
|     def setup_build_environment(self) -> None: | ||||
|         if self.platform != "windows": | ||||
|             return | ||||
| 
 | ||||
|         # CMake goes first to avoid using chocolaty cpack command. | ||||
|         worker.utils.info("Setting CMake path") | ||||
|         os.environ["PATH"] = ( | ||||
|             "C:\\Program Files\\CMake\\bin" + os.pathsep + os.environ["PATH"] | ||||
|         ) | ||||
| 
 | ||||
|         worker.utils.info("Setting VC Tools env variables") | ||||
|         windows_build_version = "10.0.19041.0" | ||||
|         os.environ["PATH"] = ( | ||||
|             f"C:\\Program Files (x86)\\Windows Kits\\10\\bin\\{windows_build_version}\\x64" | ||||
|             + os.pathsep | ||||
|             + os.environ["PATH"] | ||||
|         ) | ||||
|         os.environ["PATH"] = ( | ||||
|             "C:\\Program Files (x86)\\WiX Toolset v3.11\\bin" | ||||
|             + os.pathsep | ||||
|             + os.environ["PATH"] | ||||
|         ) | ||||
| 
 | ||||
|         if self.architecture == "arm64": | ||||
|             vs_build_tool_path = pathlib.Path( | ||||
|                 "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\VC\\Auxiliary\\Build\\vcvarsarm64.bat" | ||||
|             ) | ||||
|             vs_tool_install_dir_suffix = "\\bin\\Hostarm64\\arm64" | ||||
|         else: | ||||
|             vs_build_tool_path = pathlib.Path( | ||||
|                 "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\Build\\vcvars64.bat" | ||||
|             ) | ||||
|             vs_tool_install_dir_suffix = "\\bin\\Hostx64\\x64" | ||||
| 
 | ||||
|         vcvars_output = subprocess.check_output( | ||||
|             [vs_build_tool_path, "&&", "set"], shell=True | ||||
|         ) | ||||
|         vcvars_text = vcvars_output.decode("utf-8", "ignore") | ||||
| 
 | ||||
|         for line in vcvars_text.splitlines(): | ||||
|             match = re.match(r"(.*?)=(.*)", line) | ||||
|             if match: | ||||
|                 key = match.group(1) | ||||
|                 value = match.group(2) | ||||
| 
 | ||||
|                 if key not in os.environ: | ||||
|                     if key not in ["PROMPT", "Path"]: | ||||
|                         worker.utils.info(f"Adding key {key}={value}") | ||||
|                         os.environ[key] = value | ||||
| 
 | ||||
|         os.environ["PATH"] = ( | ||||
|             os.environ["VCToolsInstallDir"] | ||||
|             + vs_tool_install_dir_suffix | ||||
|             + os.pathsep | ||||
|             + os.environ["PATH"] | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def create_argument_parser(steps: worker.utils.BuilderSteps) -> argparse.ArgumentParser: | ||||
|     parser = worker.utils.create_argument_parser(steps=steps) | ||||
|     parser.add_argument("--needs-full-clean", action="store_true", required=False) | ||||
|     parser.add_argument("--needs-gpu-binaries", action="store_true", required=False) | ||||
|     parser.add_argument("--needs-gpu-tests", action="store_true", required=False) | ||||
|     parser.add_argument("--python-module", action="store_true", required=False) | ||||
|     parser.add_argument( | ||||
|         "--build-configuration", | ||||
|         default="release", | ||||
|         type=str, | ||||
|         choices=["release", "asserts", "sanitizer", "debug"], | ||||
|         required=False, | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--architecture", | ||||
|         default="", | ||||
|         type=str, | ||||
|         choices=["arm64", "x86_64", "amd64"], | ||||
|         required=False, | ||||
|     ) | ||||
|     return parser | ||||
							
								
								
									
124 config/worker/blender/benchmark.py Normal file
|  | @ -0,0 +1,124 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| import pathlib | ||||
| import urllib.request | ||||
| import sys | ||||
| 
 | ||||
| import conf.worker | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def create_upload( | ||||
|     builder: worker.blender.CodeBuilder, benchmark_path: pathlib.Path, revision: str | ||||
| ) -> None: | ||||
|     # Create package directory. | ||||
|     branch = builder.branch_id.replace("blender-", "").replace("-release", "") | ||||
|     name = f"{branch}-{builder.platform}-{builder.architecture}" | ||||
|     package_dir = builder.package_dir / name | ||||
| 
 | ||||
|     worker.utils.remove_dir(package_dir) | ||||
|     os.makedirs(package_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Fetch existing summary | ||||
|     worker_config = conf.worker.get_config(builder.service_env_id) | ||||
|     base_urls = { | ||||
|         "LOCAL": str(worker_config.buildbot_download_folder), | ||||
|         "UATEST": "https://builder.uatest.blender.org/download", | ||||
|         "PROD": "https://builder.blender.org/download", | ||||
|     } | ||||
|     base_url = base_urls[builder.service_env_id] | ||||
| 
 | ||||
|     summary_json_url = f"{base_url}/daily/benchmarks/{name}/summary.json" | ||||
|     summary_json_path = package_dir / "summary.json" | ||||
|     try: | ||||
|         if builder.service_env_id == "LOCAL": | ||||
|             worker.utils.copy_file(pathlib.Path(summary_json_url), summary_json_path) | ||||
|         else: | ||||
|             urllib.request.urlretrieve(summary_json_url, summary_json_path) | ||||
|     except Exception as e: | ||||
|         error_msg = str(e) | ||||
|         worker.utils.warning(f"Could not retrieve benchmark summary.json: {error_msg}") | ||||
| 
 | ||||
|     # Create json files in package directory. | ||||
|     results_json_path = benchmark_path / "results.json" | ||||
|     revision_json_path = package_dir / f"{revision}.json" | ||||
| 
 | ||||
|     worker.utils.copy_file(results_json_path, revision_json_path) | ||||
| 
 | ||||
|     summary_json = [] | ||||
|     if summary_json_path.exists(): | ||||
|         summary_json = json.loads(summary_json_path.read_text()) | ||||
|     summary_json += json.loads(results_json_path.read_text()) | ||||
|     summary_json_path.write_text(json.dumps(summary_json, indent=2)) | ||||
| 
 | ||||
|     # Create html file in package directory. | ||||
|     report_html_path = package_dir / "report.html" | ||||
|     cmd = [ | ||||
|         sys.executable, | ||||
|         builder.code_path / "tests" / "performance" / "benchmark.py", | ||||
|         "graph", | ||||
|         summary_json_path, | ||||
|         "-o", | ||||
|         report_html_path, | ||||
|     ] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def benchmark(builder: worker.blender.CodeBuilder) -> None: | ||||
|     # Parameters | ||||
|     os.chdir(builder.code_path) | ||||
|     revision = worker.utils.check_output(["git", "rev-parse", "HEAD"]) | ||||
|     revision = revision[:12] | ||||
|     blender_command = builder.blender_command_path() | ||||
|     gpu_device = "METAL" if builder.platform == "darwin" else "OPTIX" | ||||
|     # Benchmarks run in the foreground on macOS and in the background elsewhere. | ||||
|     background = builder.platform != "darwin" | ||||
| 
 | ||||
|     worker.utils.info(f"Benchmark revision {revision}, GPU device {gpu_device}") | ||||
| 
 | ||||
|     # Create clean benchmark folder | ||||
|     benchmark_path = builder.track_path / "benchmark" / "default" | ||||
|     worker.utils.remove_dir(benchmark_path) | ||||
|     os.makedirs(benchmark_path, exist_ok=True) | ||||
| 
 | ||||
|     # Initialize configuration | ||||
|     config_py_path = benchmark_path / "config.py" | ||||
|     config_py_text = f""" | ||||
| devices = ["CPU", "{gpu_device}_0"] | ||||
| background = {background} | ||||
| builds = {{"{revision}": "{blender_command}"}} | ||||
| benchmark_type = "time_series" | ||||
| """ | ||||
|     config_py_path.write_text(config_py_text) | ||||
| 
 | ||||
|     # Checkout benchmark files | ||||
|     tests_benchmarks_path = builder.code_path / "tests" / "benchmarks" | ||||
|     if not tests_benchmarks_path.exists(): | ||||
|         benchmarks_url = "https://projects.blender.org/blender/blender-benchmarks.git" | ||||
|         worker.utils.call(["git", "clone", benchmarks_url, tests_benchmarks_path]) | ||||
| 
 | ||||
|     # Run benchmark | ||||
|     cmd = [ | ||||
|         sys.executable, | ||||
|         builder.code_path / "tests" / "performance" / "benchmark.py", | ||||
|         "list", | ||||
|     ] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
|     cmd = [ | ||||
|         sys.executable, | ||||
|         builder.code_path / "tests" / "performance" / "benchmark.py", | ||||
|         "run", | ||||
|         "default", | ||||
|     ] | ||||
|     exit_code = worker.utils.call(cmd, exit_on_error=False) | ||||
| 
 | ||||
|     # Write results to be uploaded | ||||
|     create_upload(builder, benchmark_path, revision) | ||||
| 
 | ||||
|     sys.exit(exit_code) | ||||
							
								
								
									
25 config/worker/blender/blender.applescript Normal file
|  | @ -0,0 +1,25 @@ | |||
| tell application "Finder" | ||||
|           tell disk "Blender" | ||||
|                log "applescript: opening [Blender]. This will seem to hang with a pop up dialog on applescript permissions for the first run. You have 10 minutes, get on machine now and push that button !!!" | ||||
|                with timeout of 600 seconds | ||||
|                     open | ||||
|                     log "applescript: yay it opened !" | ||||
|                     log "applescript: setting current view" | ||||
|                     set current view of container window to icon view | ||||
|                     set toolbar visible of container window to false | ||||
|                     set statusbar visible of container window to false | ||||
|                     set the bounds of container window to {100, 100, 640, 472} | ||||
|                     set theViewOptions to icon view options of container window | ||||
|                     set arrangement of theViewOptions to not arranged | ||||
|                     set icon size of theViewOptions to 128 | ||||
|                     set background picture of theViewOptions to file ".background:background.tif" | ||||
|                     set position of item " " of container window to {400, 190} | ||||
|                     set position of item "blender.app" of container window to {135, 190} | ||||
|                     log "applescript: updating applications" | ||||
|                     update without registering applications | ||||
|                     delay 5 | ||||
|                     log "applescript: closing" | ||||
|                     close | ||||
|                end timeout | ||||
|      end tell | ||||
| end tell | ||||
							
								
								
									
495 config/worker/blender/bundle_dmg.py Normal file
|  | @ -0,0 +1,495 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| import re | ||||
| import time | ||||
| import subprocess | ||||
| import platform | ||||
| import pathlib | ||||
| import tempfile | ||||
| import typing | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| # Extra size which is added on top of actual files size when estimating size | ||||
| # of the destination DMG. | ||||
| _extra_dmg_size_in_bytes = 800 * 1024 * 1024 | ||||
| 
 | ||||
| ################################################################################ | ||||
| # Common utilities | ||||
| 
 | ||||
| 
 | ||||
| def get_directory_size(root_directory: pathlib.Path) -> int: | ||||
|     """ | ||||
|     Get size of directory on disk | ||||
|     """ | ||||
| 
 | ||||
|     total_size = 0 | ||||
|     for file in root_directory.glob("**/*"): | ||||
|         total_size += file.lstat().st_size | ||||
|     return total_size | ||||
| 
 | ||||
| 
 | ||||
| ################################################################################ | ||||
| # DMG bundling specific logic | ||||
| 
 | ||||
| 
 | ||||
| def collect_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]: | ||||
|     """ | ||||
|     Collect all app bundles which are to be put into DMG | ||||
| 
 | ||||
|     If the source directory points to FOO.app it will be the only app bundle | ||||
|     packed. | ||||
| 
 | ||||
|     Otherwise all .app bundles from given directory are placed to a single | ||||
|     DMG. | ||||
|     """ | ||||
| 
 | ||||
|     if source_dir.name.endswith(".app"): | ||||
|         return [source_dir] | ||||
| 
 | ||||
|     app_bundles = [] | ||||
|     for filename in source_dir.glob("*"): | ||||
|         if not filename.is_dir(): | ||||
|             continue | ||||
|         if not filename.name.endswith(".app"): | ||||
|             continue | ||||
| 
 | ||||
|         app_bundles.append(filename) | ||||
| 
 | ||||
|     return app_bundles | ||||
| 
 | ||||
| 
 | ||||
| def collect_and_log_app_bundles(source_dir: pathlib.Path) -> typing.List[pathlib.Path]: | ||||
|     app_bundles = collect_app_bundles(source_dir) | ||||
| 
 | ||||
|     if not app_bundles: | ||||
|         worker.utils.info("No app bundles found for packing") | ||||
|         return [] | ||||
| 
 | ||||
|     worker.utils.info(f"Found {len(app_bundles)} to pack:") | ||||
|     for app_bundle in app_bundles: | ||||
|         worker.utils.info(f"- {app_bundle}") | ||||
| 
 | ||||
|     return app_bundles | ||||
| 
 | ||||
| 
 | ||||
| def estimate_dmg_size(app_bundles: typing.List[pathlib.Path]) -> int: | ||||
|     """ | ||||
|     Estimate size of DMG to hold requested app bundles | ||||
| 
 | ||||
|     The size is based on actual size of all files in all bundles plus some | ||||
|     space to compensate for different size-on-disk plus some space to hold | ||||
|     codesign signatures. | ||||
| 
 | ||||
|     It is better to err on the high side, since empty space is compressed, | ||||
|     while a lack of space might cause silent failures later on. | ||||
|     """ | ||||
| 
 | ||||
|     app_bundles_size = 0 | ||||
|     for app_bundle in app_bundles: | ||||
|         app_bundles_size += get_directory_size(app_bundle) | ||||
| 
 | ||||
|     return app_bundles_size + _extra_dmg_size_in_bytes | ||||
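| # Illustrative estimate (sizes are assumptions): a 600 MiB Blender.app yields | ||||
| # roughly 600 MiB + 800 MiB of slack, i.e. a ~1.4 GiB writable image. | ||||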
| 
 | ||||
| 
 | ||||
| def copy_app_bundles( | ||||
|     app_bundles: typing.List[pathlib.Path], dir_path: pathlib.Path | ||||
| ) -> None: | ||||
|     """ | ||||
|     Copy all bundles to a given directory | ||||
| 
 | ||||
|     This directory is what the DMG will be created from. | ||||
|     """ | ||||
|     for app_bundle in app_bundles: | ||||
|         destination_dir_path = dir_path / app_bundle.name | ||||
| 
 | ||||
|         worker.utils.info(f"Copying app bundle [{app_bundle}] to [{dir_path}]") | ||||
| 
 | ||||
|         worker.utils.copy_dir(app_bundle, destination_dir_path) | ||||
| 
 | ||||
|         # Only chmod if we can't get cmake install to do it - james | ||||
|         # for r, d, f in os.walk(destination_dir_path): | ||||
|         #     worker.utils.info(f'chmoding [{r}] -> 0o755') | ||||
|         #     os.chmod(r, 0o755) | ||||
| 
 | ||||
| 
 | ||||
| def get_main_app_bundle(app_bundles: typing.List[pathlib.Path]) -> pathlib.Path: | ||||
|     """ | ||||
|     Get application bundle main for the installation | ||||
|     """ | ||||
|     return app_bundles[0] | ||||
| 
 | ||||
| 
 | ||||
| def create_dmg_image( | ||||
|     app_bundles: typing.List[pathlib.Path], | ||||
|     dmg_file_path: pathlib.Path, | ||||
|     volume_name: str, | ||||
| ) -> None: | ||||
|     """ | ||||
|     Create DMG disk image and put app bundles in it | ||||
| 
 | ||||
|     No DMG configuration or codesigning is happening here. | ||||
|     """ | ||||
|     if dmg_file_path.exists(): | ||||
|         worker.utils.info(f"Removing existing writable DMG {dmg_file_path}...") | ||||
|         worker.utils.remove_file(dmg_file_path) | ||||
| 
 | ||||
|     temp_content_path = tempfile.TemporaryDirectory(prefix="blender-dmg-content-") | ||||
|     worker.utils.info( | ||||
|         f"Preparing directory with app bundles for the DMG [{temp_content_path}]" | ||||
|     ) | ||||
|     with temp_content_path as content_dir_str: | ||||
|         # Copy all bundles to a clean directory. | ||||
|         content_dir_path = pathlib.Path(content_dir_str) | ||||
|         # worker.utils.info(f'content_dir_path={content_dir_path}') | ||||
|         copy_app_bundles(app_bundles, content_dir_path) | ||||
| 
 | ||||
|         # Estimate size of the DMG. | ||||
|         dmg_size = estimate_dmg_size(app_bundles) | ||||
|         worker.utils.info(f"Estimated DMG size: [{dmg_size:,}] bytes.") | ||||
| 
 | ||||
|         # Create the DMG. | ||||
|         worker.utils.info(f"Creating writable DMG [{dmg_file_path}]") | ||||
|         command = ( | ||||
|             "hdiutil", | ||||
|             "create", | ||||
|             "-size", | ||||
|             str(dmg_size), | ||||
|             "-fs", | ||||
|             "HFS+", | ||||
|             "-srcfolder", | ||||
|             content_dir_path, | ||||
|             "-volname", | ||||
|             volume_name, | ||||
|             "-format", | ||||
|             "UDRW", | ||||
|             "-mode", | ||||
|             "755", | ||||
|             dmg_file_path, | ||||
|         ) | ||||
| 
 | ||||
|         worker.utils.call(command) | ||||
| 
 | ||||
| 
 | ||||
| def get_writable_dmg_file_path(dmg_file_path: pathlib.Path) -> pathlib.Path: | ||||
|     """ | ||||
|     Get file path for writable DMG image | ||||
|     """ | ||||
|     parent = dmg_file_path.parent | ||||
|     return parent / (dmg_file_path.stem + "-temp.dmg") | ||||
| 
 | ||||
| 
 | ||||
| def mount_readwrite_dmg(dmg_file_path: pathlib.Path) -> None: | ||||
|     """ | ||||
|     Mount writable DMG | ||||
| 
 | ||||
|     The mount point will be /Volumes/<volume name> | ||||
|     """ | ||||
| 
 | ||||
|     worker.utils.info(f"Mounting read-write DMG ${dmg_file_path}") | ||||
|     cmd: worker.utils.CmdSequence = [ | ||||
|         "hdiutil", | ||||
|         "attach", | ||||
|         "-readwrite", | ||||
|         "-noverify", | ||||
|         "-noautoopen", | ||||
|         dmg_file_path, | ||||
|     ] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def get_mount_directory_for_volume_name(volume_name: str) -> pathlib.Path: | ||||
|     """ | ||||
|     Get directory under which the volume will be mounted | ||||
|     """ | ||||
| 
 | ||||
|     return pathlib.Path("/Volumes") / volume_name | ||||
| 
 | ||||
| 
 | ||||
| def eject_volume(volume_name: str) -> None: | ||||
|     """ | ||||
|     Eject given volume, if mounted | ||||
|     """ | ||||
|     mount_directory = get_mount_directory_for_volume_name(volume_name) | ||||
|     if not mount_directory.exists(): | ||||
|         return | ||||
|     mount_directory_str = str(mount_directory) | ||||
| 
 | ||||
|     worker.utils.info(f"Ejecting volume [{volume_name}]") | ||||
| 
 | ||||
|     # First try through Finder, as sometimes diskutil fails for unknown reasons. | ||||
|     command = [ | ||||
|         "osascript", | ||||
|         "-e", | ||||
|         f"""tell application "Finder" to eject (every disk whose name is "{volume_name}")""", | ||||
|     ] | ||||
|     worker.utils.call(command) | ||||
|     if not mount_directory.exists(): | ||||
|         return | ||||
| 
 | ||||
|     # Figure out which device to eject. | ||||
|     mount_output = subprocess.check_output(["mount"]).decode() | ||||
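|     # A matching mount line typically looks like: | ||||
|     #   /dev/disk2s1 on /Volumes/Blender (hfs, local, nodev, ...) | ||||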
|     device = "" | ||||
|     for line in mount_output.splitlines(): | ||||
|         if f"on {mount_directory_str} (" not in line: | ||||
|             continue | ||||
|         tokens = line.split(" ", 3) | ||||
|         if len(tokens) < 3: | ||||
|             continue | ||||
|         if tokens[1] != "on": | ||||
|             continue | ||||
|         if device: | ||||
|             raise Exception( | ||||
|                 f"Multiple devices found for mounting point [{mount_directory}]" | ||||
|             ) | ||||
|         device = tokens[0] | ||||
| 
 | ||||
|     if not device: | ||||
|         raise Exception(f"No device found for mounting point [{mount_directory}]") | ||||
| 
 | ||||
|     worker.utils.info( | ||||
|         f"[{mount_directory}] is mounted as device [{device}], ejecting..." | ||||
|     ) | ||||
|     command = ["diskutil", "eject", device] | ||||
|     worker.utils.call(command) | ||||
| 
 | ||||
| 
 | ||||
| def copy_background_if_needed( | ||||
|     background_image_file_path: pathlib.Path, mount_directory: pathlib.Path | ||||
| ) -> None: | ||||
|     """ | ||||
|     Copy background to the DMG | ||||
| 
 | ||||
|     If the background image is not specified it will not be copied. | ||||
|     """ | ||||
| 
 | ||||
|     if not background_image_file_path: | ||||
|         worker.utils.info("No background image provided.") | ||||
|         return | ||||
| 
 | ||||
|     destination_dir = mount_directory / ".background" | ||||
|     destination_dir.mkdir(exist_ok=True) | ||||
| 
 | ||||
|     destination_file_path = destination_dir / background_image_file_path.name | ||||
| 
 | ||||
|     worker.utils.info( | ||||
|         f"Copying background image [{background_image_file_path}] to [{destination_file_path}]" | ||||
|     ) | ||||
|     worker.utils.copy_file(background_image_file_path, destination_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def create_applications_link(mount_directory: pathlib.Path) -> None: | ||||
|     """ | ||||
|     Create link to /Applications in the given location | ||||
|     """ | ||||
|     worker.utils.info(f"Creating link to /Applications -> {mount_directory}") | ||||
|     target_path = mount_directory / " " | ||||
|     cmd: worker.utils.CmdSequence = ["ln", "-s", "/Applications", target_path] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def run_applescript_file_path( | ||||
|     applescript_file_path: pathlib.Path, | ||||
|     volume_name: str, | ||||
|     app_bundles: typing.List[pathlib.Path], | ||||
|     background_image_file_path: pathlib.Path, | ||||
| ) -> None: | ||||
|     """ | ||||
|     Run given applescript to adjust look and feel of the DMG | ||||
|     """ | ||||
|     main_app_bundle = get_main_app_bundle(app_bundles) | ||||
| 
 | ||||
|     architecture = platform.machine().lower() | ||||
|     # needs_run_applescript = (architecture != "x86_64") | ||||
|     needs_run_applescript = True | ||||
| 
 | ||||
|     if not needs_run_applescript: | ||||
|         worker.utils.info( | ||||
|             f"Having issues with apple script on [{architecture}], skipping !" | ||||
|         ) | ||||
|         return | ||||
| 
 | ||||
|     temp_script_file_path = tempfile.NamedTemporaryFile(mode="w", suffix=".applescript") | ||||
|     with temp_script_file_path as temp_applescript_file: | ||||
|         worker.utils.info( | ||||
|             f"Adjusting applescript [{temp_script_file_path.name}] for volume name [{volume_name}]" | ||||
|         ) | ||||
|         # Adjust script to the specific volume name. | ||||
|         with open(applescript_file_path, mode="r") as input_file: | ||||
|             worker.utils.info("Start script update") | ||||
|             for line in input_file.readlines(): | ||||
|                 stripped_line = line.strip() | ||||
|                 if stripped_line.startswith("tell disk"): | ||||
|                     line = re.sub('tell disk ".*"', f'tell disk "{volume_name}"', line) | ||||
|                 elif stripped_line.startswith("set background picture"): | ||||
|                     if not background_image_file_path: | ||||
|                         continue | ||||
|                     else: | ||||
|                         background_image_short = ( | ||||
|                             f".background:{background_image_file_path.name}" | ||||
|                         ) | ||||
|                         line = re.sub( | ||||
|                             'to file ".*"', f'to file "{background_image_short}"', line | ||||
|                         ) | ||||
|                 line = line.replace("blender.app", main_app_bundle.name) | ||||
|                 stripped_line = line.rstrip("\r\n") | ||||
|                 worker.utils.info(f"line={stripped_line}") | ||||
|                 temp_applescript_file.write(line) | ||||
| 
 | ||||
|         temp_applescript_file.flush() | ||||
|         worker.utils.info("End script update") | ||||
| 
 | ||||
|         # This does not help with issues when running the applescript | ||||
|         worker.utils.info("Updating permissions") | ||||
|         os.chmod(temp_script_file_path.name, 0o755) | ||||
| 
 | ||||
|         # Setting flags on this applescript makes execution fail ("not permitted") | ||||
|         # command = ['chflags', "uchg", temp_script_file_path.name] | ||||
|         # worker.utils.call(command) | ||||
| 
 | ||||
|         command = ["osascript", "-s", "o", temp_script_file_path.name] | ||||
|         worker.utils.call(command) | ||||
| 
 | ||||
|         # NOTE: This is copied from bundle.sh. The exact reason for the sleep | ||||
|         # remains a mystery. | ||||
|         worker.utils.info("Waiting for applescript...") | ||||
|         time.sleep(5) | ||||
| 
 | ||||
| 
 | ||||
| def compress_dmg( | ||||
|     writable_dmg_file_path: pathlib.Path, final_dmg_file_path: pathlib.Path | ||||
| ) -> None: | ||||
|     """ | ||||
|     Compress temporary read-write DMG | ||||
|     """ | ||||
|     cmd: worker.utils.CmdSequence = [ | ||||
|         "hdiutil", | ||||
|         "convert", | ||||
|         writable_dmg_file_path, | ||||
|         "-format", | ||||
|         "UDZO", | ||||
|         "-o", | ||||
|         final_dmg_file_path, | ||||
|     ] | ||||
| 
 | ||||
|     if final_dmg_file_path.exists(): | ||||
|         worker.utils.info(f"Removing old compressed DMG [{final_dmg_file_path}]") | ||||
|         worker.utils.remove_file(final_dmg_file_path) | ||||
| 
 | ||||
|     worker.utils.info("Compressing disk image...") | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def create_final_dmg( | ||||
|     app_bundles: typing.List[pathlib.Path], | ||||
|     dmg_file_path: pathlib.Path, | ||||
|     background_image_file_path: pathlib.Path, | ||||
|     volume_name: str, | ||||
|     applescript_file_path: pathlib.Path, | ||||
| ) -> None: | ||||
|     """ | ||||
|     Create DMG with all app bundles | ||||
| 
 | ||||
|     Will take care of configuring the background | ||||
|     """ | ||||
| 
 | ||||
|     worker.utils.info("Running all routines to create final DMG") | ||||
| 
 | ||||
|     writable_dmg_file_path = get_writable_dmg_file_path(dmg_file_path) | ||||
|     worker.utils.info(f"Mouting volume [{volume_name}]") | ||||
|     mount_directory = get_mount_directory_for_volume_name(volume_name) | ||||
|     worker.utils.info(f"Mount at [{mount_directory}]") | ||||
| 
 | ||||
|     # Make sure volume is not mounted. | ||||
|     # If it is mounted it will prevent removing old DMG files and could make | ||||
|     # it so app bundles are copied to the wrong place. | ||||
|     eject_volume(volume_name) | ||||
| 
 | ||||
|     worker.utils.info(f"Creating image [{writable_dmg_file_path}] to [{volume_name}]") | ||||
|     create_dmg_image(app_bundles, writable_dmg_file_path, volume_name) | ||||
| 
 | ||||
|     worker.utils.info(f"Mount r/w mode [{writable_dmg_file_path}]") | ||||
|     mount_readwrite_dmg(writable_dmg_file_path) | ||||
| 
 | ||||
|     copy_background_if_needed(background_image_file_path, mount_directory) | ||||
|     create_applications_link(mount_directory) | ||||
| 
 | ||||
|     run_applescript_file_path( | ||||
|         applescript_file_path, volume_name, app_bundles, background_image_file_path | ||||
|     ) | ||||
| 
 | ||||
|     eject_volume(volume_name) | ||||
| 
 | ||||
|     compress_dmg(writable_dmg_file_path, dmg_file_path) | ||||
|     worker.utils.remove_file(writable_dmg_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def ensure_dmg_extension(filepath: pathlib.Path) -> pathlib.Path: | ||||
|     """ | ||||
|     Make sure the given file has a .dmg extension | ||||
|     """ | ||||
| 
 | ||||
|     if filepath.suffix != ".dmg": | ||||
|         return filepath.with_suffix(f"{filepath.suffix}.dmg") | ||||
|     return filepath | ||||
| 
 | ||||
| 
 | ||||
| def get_dmg_file_path( | ||||
|     requested_file_path: pathlib.Path, app_bundles: typing.List[pathlib.Path] | ||||
| ) -> pathlib.Path: | ||||
|     """ | ||||
|     Get full file path for the final DMG image | ||||
| 
 | ||||
|     Will use the provided one when possible, otherwise will deduce it from | ||||
|     app bundles. | ||||
| 
 | ||||
|     If the name is deduced, the DMG is stored in the current directory. | ||||
|     """ | ||||
| 
 | ||||
|     if requested_file_path: | ||||
|         return ensure_dmg_extension(requested_file_path.absolute()) | ||||
| 
 | ||||
|     # TODO(sergey): This is not necessarily the main one. | ||||
|     main_bundle = app_bundles[0] | ||||
|     # Strip .app from the name | ||||
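|     # e.g. "Blender.app" -> "Blender.dmg" in the current directory. | ||||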
|     return pathlib.Path(main_bundle.name[:-4] + ".dmg").absolute() | ||||
| 
 | ||||
| 
 | ||||
| def get_volume_name_from_dmg_file_path(dmg_file_path: pathlib.Path) -> str: | ||||
|     """ | ||||
|     Deduce volume name from the DMG path | ||||
| 
 | ||||
|     Will use the first part of the DMG file name prior to the dash. | ||||
|     """ | ||||
| 
 | ||||
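|     # e.g. "blender-4.2.0-macos-arm64.dmg" (hypothetical name) -> "Blender" | ||||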
|     tokens = dmg_file_path.stem.split("-") | ||||
|     words = tokens[0].split() | ||||
| 
 | ||||
|     return " ".join(word.capitalize() for word in words) | ||||
| 
 | ||||
| 
 | ||||
| def bundle( | ||||
|     source_dir: pathlib.Path, | ||||
|     dmg_file_path: pathlib.Path, | ||||
|     applescript_file_path: pathlib.Path, | ||||
|     background_image_file_path: pathlib.Path, | ||||
| ) -> None: | ||||
|     app_bundles = collect_and_log_app_bundles(source_dir) | ||||
|     for app_bundle in app_bundles: | ||||
|         worker.utils.info(f"App bundle path [{app_bundle}]") | ||||
| 
 | ||||
|     dmg_file_path = get_dmg_file_path(dmg_file_path, app_bundles) | ||||
|     volume_name = get_volume_name_from_dmg_file_path(dmg_file_path) | ||||
| 
 | ||||
|     worker.utils.info(f"Will produce DMG [{dmg_file_path.name}]") | ||||
| 
 | ||||
|     create_final_dmg( | ||||
|         app_bundles, | ||||
|         dmg_file_path, | ||||
|         background_image_file_path, | ||||
|         volume_name, | ||||
|         applescript_file_path, | ||||
|     ) | ||||
							
								
								
									
561 config/worker/blender/compile.py Normal file
							|  | @ -0,0 +1,561 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import multiprocessing | ||||
| import os | ||||
| import platform | ||||
| import pathlib | ||||
| 
 | ||||
| from typing import Dict | ||||
| from pathlib import Path | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def fetch_ideal_cpu_count(estimate_core_memory_in_mb: int) -> int: | ||||
|     """Fetch cpu ideal for the building process based on machine info""" | ||||
|     worker.utils.info(f"estimate_core_memory_in_mb={estimate_core_memory_in_mb}") | ||||
| 
 | ||||
|     total_cpu_count = multiprocessing.cpu_count() | ||||
|     worker.utils.info(f"total_cpu_count={total_cpu_count}") | ||||
| 
 | ||||
|     ideal_cpu_count = total_cpu_count | ||||
|     spare_cpu_count = 2 | ||||
| 
 | ||||
|     if platform.system().lower() != "darwin": | ||||
|         worker.utils.info(f"In current path {os.getcwd()}") | ||||
|         import psutil | ||||
| 
 | ||||
|         virtual_memory = psutil.virtual_memory() | ||||
|         worker.utils.info(f"virtual_memory={virtual_memory}") | ||||
| 
 | ||||
|         total_memory_in_bytes = virtual_memory.total | ||||
|         worker.utils.info(f"total_memory_in_bytes={total_memory_in_bytes}") | ||||
| 
 | ||||
|         available_memory_in_bytes = virtual_memory.available | ||||
|         worker.utils.info(f"available_memory_in_bytes={available_memory_in_bytes}") | ||||
| 
 | ||||
|         usable_memory_in_bytes = available_memory_in_bytes | ||||
|         worker.utils.info(f"usable_memory_in_bytes={usable_memory_in_bytes}") | ||||
| 
 | ||||
|         estimate_memory_per_core_in_bytes = estimate_core_memory_in_mb * 1024 * 1024 | ||||
|         worker.utils.info( | ||||
|             f"estimate_memory_per_core_in_bytes={estimate_memory_per_core_in_bytes}" | ||||
|         ) | ||||
| 
 | ||||
|         capable_cpu_count = int( | ||||
|             total_memory_in_bytes / estimate_memory_per_core_in_bytes | ||||
|         ) | ||||
|         worker.utils.info(f"capable_cpu_count={capable_cpu_count}") | ||||
| 
 | ||||
|         min_cpu_count = min(total_cpu_count, capable_cpu_count) | ||||
|         worker.utils.info(f"min_cpu_count={min_cpu_count}") | ||||
| 
 | ||||
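|         # Use every usable core up to 8; beyond that, keep a couple of cores spare for the system. | ||||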
|         ideal_cpu_count = ( | ||||
|             min_cpu_count if min_cpu_count <= 8 else min_cpu_count - spare_cpu_count | ||||
|         ) | ||||
|         worker.utils.info(f"ideal_cpu_count={ideal_cpu_count}") | ||||
|     return ideal_cpu_count | ||||
| 
 | ||||
| 
 | ||||
| def get_cmake_build_type(builder: worker.blender.CodeBuilder) -> str: | ||||
|     if builder.build_configuration == "debug": | ||||
|         return "Debug" | ||||
|     elif builder.build_configuration == "sanitizer": | ||||
|         # No reliable ASAN on Windows currently. | ||||
|         if builder.platform != "windows": | ||||
|             return "RelWithDebInfo" | ||||
|         else: | ||||
|             return "Release" | ||||
|     else: | ||||
|         return "Release" | ||||
| 
 | ||||
| 
 | ||||
| def get_cmake_options(builder: worker.blender.CodeBuilder) -> worker.utils.CmdSequence: | ||||
|     needs_gtest_compile = not builder.python_module | ||||
| 
 | ||||
|     with_gtests_state = "ON" if needs_gtest_compile else "OFF" | ||||
|     with_gpu_binaries_state = "ON" if builder.needs_gpu_binaries else "OFF" | ||||
|     with_gpu_tests = False | ||||
| 
 | ||||
|     buildbotConfig = builder.pipeline_config() | ||||
| 
 | ||||
|     # This is meant for stable build compilation | ||||
|     config_file_path = "build_files/cmake/config/blender_release.cmake" | ||||
| 
 | ||||
|     platform_config_file_path = None | ||||
|     if builder.platform == "darwin": | ||||
|         platform_config_file_path = "build_files/buildbot/config/blender_macos.cmake" | ||||
|     elif builder.platform == "linux": | ||||
|         platform_config_file_path = "build_files/buildbot/config/blender_linux.cmake" | ||||
|     elif builder.platform == "windows": | ||||
|         platform_config_file_path = "build_files/buildbot/config/blender_windows.cmake" | ||||
| 
 | ||||
|     if platform_config_file_path: | ||||
|         worker.utils.info( | ||||
|             f'Trying platform-specific buildbot configuration "{platform_config_file_path}"' | ||||
|         ) | ||||
|         if (Path(builder.blender_dir) / platform_config_file_path).exists(): | ||||
|             worker.utils.info( | ||||
|                 f'Using platform-specific buildbot configuration "{platform_config_file_path}"' | ||||
|             ) | ||||
|             config_file_path = platform_config_file_path | ||||
|     else: | ||||
|         worker.utils.info(f'Using generic buildbot configuration "{config_file_path}"') | ||||
| 
 | ||||
|     # Must be first so that we can override some of the options found in the file | ||||
|     options = ["-C", os.path.join(builder.blender_dir, config_file_path)] | ||||
| 
 | ||||
|     # Optional build as Python module. | ||||
|     if builder.python_module: | ||||
|         bpy_config_file_path = "build_files/cmake/config/bpy_module.cmake" | ||||
|         options += ["-C", os.path.join(builder.blender_dir, bpy_config_file_path)] | ||||
|         options += ["-DWITH_INSTALL_PORTABLE=ON"] | ||||
| 
 | ||||
|     can_enable_oneapi_binaries = True | ||||
|     if builder.service_env_id != "PROD": | ||||
|         # UATEST machines are too slow currently. | ||||
|         worker.utils.info(f'Disabling oneAPI binaries on "{builder.service_env_id}"') | ||||
|         can_enable_oneapi_binaries = False | ||||
|     if builder.patch_id: | ||||
|         # Not enough throughput of the systems to cover AoT oneAPI binaries for patches. | ||||
|         worker.utils.info("Disabling oneAPI binaries for patch build") | ||||
|         can_enable_oneapi_binaries = False | ||||
|     if builder.track_id == "vexp": | ||||
|         # Only enable AoT oneAPI binaries for main and release branches. | ||||
|         worker.utils.info("Disabling oneAPI binaries for branch build") | ||||
|         can_enable_oneapi_binaries = False | ||||
| 
 | ||||
|     # Add platform specific generator and configs | ||||
|     if builder.platform == "darwin": | ||||
|         if builder.needs_ninja: | ||||
|             options += ["-G", "Ninja"] | ||||
|         else: | ||||
|             options += ["-G", "Unix Makefiles"] | ||||
| 
 | ||||
|         options += [f"-DCMAKE_OSX_ARCHITECTURES:STRING={builder.architecture}"] | ||||
| 
 | ||||
|     elif builder.platform == "linux": | ||||
|         if builder.needs_ninja: | ||||
|             options += ["-G", "Ninja"] | ||||
|         else: | ||||
|             options += ["-G", "Unix Makefiles"] | ||||
| 
 | ||||
|     elif builder.platform == "windows": | ||||
|         if builder.needs_ninja: | ||||
|             # set CC=%LLVM_DIR%\bin\clang-cl | ||||
|             # set CXX=%LLVM_DIR%\bin\clang-cl | ||||
|             # set CFLAGS=-m64 -fmsc-version=1922 | ||||
|             # set CXXFLAGS=-m64 -fmsc-version=1922 | ||||
|             vc_tools_install_dir = os.environ.get("VCToolsInstallDir") | ||||
|             if not vc_tools_install_dir: | ||||
|                 raise BaseException("Missing environment variable VCToolsInstallDir") | ||||
| 
 | ||||
|             vc_tool_install_path = pathlib.PureWindowsPath(vc_tools_install_dir) | ||||
|             if builder.architecture == "arm64": | ||||
|                 compiler_file_path = "C:/Program Files/LLVM/bin/clang-cl.exe" | ||||
|                 linker_file_path = "C:/Program Files/LLVM/bin/lld-link.exe" | ||||
|             else: | ||||
|                 vs_tool_install_dir_suffix = "bin/Hostx64/x64" | ||||
|                 compiler_file_path = str( | ||||
|                     vc_tool_install_path / f"{vs_tool_install_dir_suffix}/cl.exe" | ||||
|                 ) | ||||
|                 linker_file_path = str( | ||||
|                     vc_tool_install_path / f"{vs_tool_install_dir_suffix}/link.exe" | ||||
|                 ) | ||||
| 
 | ||||
|             options += ["-G", "Ninja"] | ||||
|             # -DWITH_WINDOWS_SCCACHE=On | ||||
|             options += [ | ||||
|                 f"-DCMAKE_C_COMPILER:FILEPATH={compiler_file_path}", | ||||
|                 f"-DCMAKE_CXX_COMPILER:FILEPATH={compiler_file_path}", | ||||
|             ] | ||||
|             # options += ["-DCMAKE_EXE_LINKER_FLAGS:STRING=/machine:x64"] | ||||
|             options += [f"-DCMAKE_LINKER:FILEPATH={linker_file_path}"] | ||||
|             # Skip the test, it does not work | ||||
|             options += ["-DCMAKE_C_COMPILER_WORKS=1"] | ||||
|             options += ["-DCMAKE_CXX_COMPILER_WORKS=1"] | ||||
| 
 | ||||
|         else: | ||||
|             if builder.architecture == "arm64": | ||||
|                 options += ["-G", "Visual Studio 17 2022", "-A", "arm64"] | ||||
|             else: | ||||
|                 options += ["-G", "Visual Studio 16 2019", "-A", "x64"] | ||||
| 
 | ||||
|     # Add configured overrides | ||||
|     platform_architecture = f"{builder.platform}-{builder.architecture}" | ||||
| 
 | ||||
|     cmake_overrides: Dict[str, str] = {} | ||||
|     cmake_overrides.update(buildbotConfig["cmake"]["default"]["overrides"]) | ||||
|     cmake_overrides.update(buildbotConfig["cmake"][platform_architecure]["overrides"]) | ||||
| 
 | ||||
|     # Disallow certain options | ||||
|     restricted_key_patterns = [ | ||||
|         "POSTINSTALL_SCRIPT", | ||||
|         "OPTIX_", | ||||
|         "CMAKE_OSX_ARCHITECTURES", | ||||
|         "CMAKE_BUILD_TYPE", | ||||
|         "CMAKE_INSTALL_PREFIX", | ||||
|         "WITH_GTESTS", | ||||
|         "CUDA", | ||||
|         "WITH_CYCLES", | ||||
|         "CYCLES_CUDA", | ||||
|     ] | ||||
| 
 | ||||
|     for cmake_key in cmake_overrides.keys(): | ||||
|         for restricted_key_pattern in restricted_key_patterns: | ||||
|             if restricted_key_pattern in cmake_key: | ||||
|                 raise Exception( | ||||
|                     f"CMake key [{cmake_key}] cannot be overriden, aborting" | ||||
|                 ) | ||||
| 
 | ||||
|     for cmake_key, cmake_value in cmake_overrides.items(): | ||||
|         options += [f"-D{cmake_key}={cmake_value}"] | ||||
| 
 | ||||
|     cmake_build_type = get_cmake_build_type(builder) | ||||
|     options += [f"-DCMAKE_BUILD_TYPE:STRING={cmake_build_type}"] | ||||
| 
 | ||||
|     if builder.build_configuration == "sanitizer": | ||||
|         # No reliable ASAN on Windows currently. | ||||
|         if builder.platform != "windows": | ||||
|             options += ["-DWITH_COMPILER_ASAN=ON"] | ||||
|         options += ["-DWITH_ASSERT_RELEASE=ON"] | ||||
|         # Avoid buildbot timeouts, see blender/blender#116635. | ||||
|         options += ["-DWITH_UNITY_BUILD=OFF"] | ||||
|     elif builder.build_configuration == "asserts": | ||||
|         options += ["-DWITH_ASSERT_RELEASE=ON"] | ||||
| 
 | ||||
|     options += [f"-DCMAKE_INSTALL_PREFIX={builder.install_dir}"] | ||||
| 
 | ||||
|     options += ["-DWITH_INSTALL_COPYRIGHT=ON"] | ||||
| 
 | ||||
|     options += [f"-DWITH_GTESTS={with_gtests_state}"] | ||||
| 
 | ||||
|     if builder.platform == "windows": | ||||
|         if builder.architecture != "arm64": | ||||
|             # CUDA + HIP + oneAPI on Windows | ||||
|             options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"] | ||||
|             options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"] | ||||
|             if can_enable_oneapi_binaries: | ||||
|                 options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"] | ||||
|                 options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"] | ||||
|             else: | ||||
|                 options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"] | ||||
|             if "hip" in buildbotConfig: | ||||
|                 hip_version = buildbotConfig["hip"]["version"] | ||||
|             else: | ||||
|                 hip_version = "5.2.21440" | ||||
|             if "ocloc" in buildbotConfig: | ||||
|                 ocloc_version = buildbotConfig["ocloc"]["version"] | ||||
|             else: | ||||
|                 ocloc_version = "dev_01" | ||||
|             options += [f"-DHIP_ROOT_DIR=C:/ProgramData/AMD/HIP/hip_sdk_{hip_version}"] | ||||
|             options += ["-DHIP_PERL_DIR=C:/ProgramData/AMD/HIP/strawberry/perl/bin"] | ||||
|             options += [ | ||||
|                 f"-DOCLOC_INSTALL_DIR=C:/ProgramData/Intel/ocloc/ocloc_{ocloc_version}" | ||||
|             ] | ||||
|     elif builder.platform == "linux": | ||||
|         # CUDA on Linux | ||||
|         options += [f"-DWITH_CYCLES_CUDA_BINARIES={with_gpu_binaries_state}"] | ||||
|         options += [f"-DWITH_CYCLES_HIP_BINARIES={with_gpu_binaries_state}"] | ||||
|         if can_enable_oneapi_binaries: | ||||
|             options += [f"-DWITH_CYCLES_ONEAPI_BINARIES={with_gpu_binaries_state}"] | ||||
|             options += ["-DSYCL_OFFLINE_COMPILER_PARALLEL_JOBS=2"] | ||||
|         else: | ||||
|             options += ["-DWITH_CYCLES_ONEAPI_BINARIES=OFF"] | ||||
| 
 | ||||
|         # Directory changed to just /opt/rocm in 6.x | ||||
|         rocm_path = pathlib.Path("/opt/rocm/hip") | ||||
|         if not rocm_path.exists(): | ||||
|             rocm_path = pathlib.Path("/opt/rocm") | ||||
|         options += [f"-DHIP_ROOT_DIR:PATH={rocm_path}"] | ||||
| 
 | ||||
|         # GPU render tests support Linux + NVIDIA currently | ||||
|         if builder.needs_gpu_tests: | ||||
|             with_gpu_tests = True | ||||
|             if builder.needs_gpu_binaries: | ||||
|                 options += ["-DCYCLES_TEST_DEVICES=CPU;OPTIX"] | ||||
|     elif builder.platform == "darwin": | ||||
|         # Metal on macOS | ||||
|         if builder.architecture == "arm64": | ||||
|             if builder.needs_gpu_tests: | ||||
|                 with_gpu_tests = True | ||||
|             options += ["-DCYCLES_TEST_DEVICES=CPU;METAL"] | ||||
| 
 | ||||
|     if with_gpu_tests: | ||||
|         # Needs X11 or Wayland, and fails with xvfb to emulate X11. | ||||
|         # options += [f"-DWITH_GPU_DRAW_TESTS=ON"] | ||||
|         options += ["-DWITH_GPU_RENDER_TESTS=ON"] | ||||
|         options += ["-DWITH_GPU_RENDER_TESTS_SILENT=OFF"] | ||||
|         options += ["-DWITH_COMPOSITOR_REALTIME_TESTS=ON"] | ||||
| 
 | ||||
|     if "optix" in buildbotConfig: | ||||
|         optix_version = buildbotConfig["optix"]["version"] | ||||
| 
 | ||||
|         if builder.platform == "windows" and builder.architecture != "arm64": | ||||
|             options += [ | ||||
|                 f"-DOPTIX_ROOT_DIR:PATH=C:/ProgramData/NVIDIA Corporation/OptiX SDK {optix_version}" | ||||
|             ] | ||||
|         elif builder.platform == "linux": | ||||
|             optix_base_dir = pathlib.Path.home() / ".devops" / "apps" | ||||
|             options += [ | ||||
|                 f"-DOPTIX_ROOT_DIR:PATH={optix_base_dir}/NVIDIA-OptiX-SDK-{optix_version}-linux64-x86_64" | ||||
|             ] | ||||
| 
 | ||||
|     # Blender 4.3 has switched to pre-compiled HIP-RT libraries. | ||||
|     if "hiprt" in buildbotConfig: | ||||
|         hiprt_version = buildbotConfig["hiprt"]["version"] | ||||
| 
 | ||||
|         if builder.platform == "windows" and builder.architecture != "arm64": | ||||
|             options += [ | ||||
|                 f"-DHIPRT_ROOT_DIR:PATH=C:/ProgramData/AMD/HIP/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}" | ||||
|             ] | ||||
|         elif builder.platform == "linux": | ||||
|             hiprt_base_dir = pathlib.Path.home() / ".devops" / "apps" | ||||
|             options += [ | ||||
|                 f"-DHIPRT_ROOT_DIR:PATH={hiprt_base_dir}/hiprtsdk-{hiprt_version}/hiprt{hiprt_version}" | ||||
|             ] | ||||
|     # Enable option to verify enabled libraries and features did not get disabled. | ||||
|     options += ["-DWITH_STRICT_BUILD_OPTIONS=ON"] | ||||
| 
 | ||||
|     if builder.needs_gpu_binaries: | ||||
|         try: | ||||
|             cuda10_version = buildbotConfig["cuda10"]["version"] | ||||
|         except KeyError: | ||||
|             cuda10_version = buildbotConfig["sdks"]["cuda10"]["version"] | ||||
| 
 | ||||
|         cuda10_folder_version = ".".join(cuda10_version.split(".")[:2]) | ||||
| 
 | ||||
|         try: | ||||
|             cuda11_version = buildbotConfig["cuda11"]["version"] | ||||
|         except KeyError: | ||||
|             cuda11_version = buildbotConfig["sdks"]["cuda11"]["version"] | ||||
| 
 | ||||
|         cuda11_folder_version = ".".join(cuda11_version.split(".")[:2]) | ||||
| 
 | ||||
|         try: | ||||
|             cuda12_version = buildbotConfig["cuda12"]["version"] | ||||
|             cuda12_folder_version = ".".join(cuda12_version.split(".")[:2]) | ||||
|             have_cuda12 = True | ||||
|         except KeyError: | ||||
|             have_cuda12 = False | ||||
| 
 | ||||
|         if builder.platform == "windows" and builder.architecture != "arm64": | ||||
|             # CUDA 10 | ||||
|             cuda10_path = pathlib.Path( | ||||
|                 f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda10_folder_version}" | ||||
|             ) | ||||
|             if not cuda10_path.exists(): | ||||
|                 raise Exception( | ||||
|                     f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting" | ||||
|                 ) | ||||
|             cuda10_file_path = cuda10_path / "bin" / "nvcc.exe" | ||||
| 
 | ||||
|             options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"] | ||||
|             options += [f"-DCUDA10_NVCC_EXECUTABLE:FILEPATH={cuda10_file_path}"] | ||||
| 
 | ||||
|             # CUDA 11 | ||||
|             cuda11_path = pathlib.Path( | ||||
|                 f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda11_folder_version}" | ||||
|             ) | ||||
|             if not cuda11_path.exists(): | ||||
|                 raise Exception( | ||||
|                     f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting" | ||||
|                 ) | ||||
|             cuda11_file_path = cuda11_path / "bin" / "nvcc.exe" | ||||
| 
 | ||||
|             # CUDA 12 | ||||
|             if have_cuda12: | ||||
|                 cuda12_path = pathlib.Path( | ||||
|                     f"C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v{cuda12_folder_version}" | ||||
|                 ) | ||||
|                 if not cuda12_path.exists(): | ||||
|                     raise Exception( | ||||
|                         f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting" | ||||
|                     ) | ||||
|                 cuda12_file_path = cuda12_path / "bin" / "nvcc.exe" | ||||
| 
 | ||||
|                 options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"] | ||||
|                 options += [f"-DCUDA11_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"] | ||||
| 
 | ||||
|                 options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"] | ||||
|                 options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"] | ||||
|             else: | ||||
|                 options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"] | ||||
|                 options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"] | ||||
| 
 | ||||
|         elif builder.platform == "linux": | ||||
|             # CUDA 10 | ||||
|             cuda10_path = pathlib.Path(f"/usr/local/cuda-{cuda10_folder_version}") | ||||
|             if not cuda10_path.exists(): | ||||
|                 raise Exception( | ||||
|                     f"Was not able to find CUDA path [{cuda10_path}] for version [{cuda10_version}], aborting" | ||||
|                 ) | ||||
|             cuda10_file_path = cuda10_path / "bin" / "nvcc" | ||||
| 
 | ||||
|             # CUDA 11 | ||||
|             cuda11_path = pathlib.Path(f"/usr/local/cuda-{cuda11_folder_version}") | ||||
|             if not cuda11_path.exists(): | ||||
|                 raise Exception( | ||||
|                     f"Was not able to find CUDA path [{cuda11_path}] for version [{cuda11_version}], aborting" | ||||
|                 ) | ||||
|             cuda11_file_path = cuda11_path / "bin" / "nvcc" | ||||
| 
 | ||||
|             # CUDA 12 | ||||
|             if have_cuda12: | ||||
|                 cuda12_path = pathlib.Path(f"/usr/local/cuda-{cuda12_folder_version}") | ||||
|                 if not cuda12_path.exists(): | ||||
|                     raise Exception( | ||||
|                         f"Was not able to find CUDA path [{cuda12_path}] for version [{cuda12_version}], aborting" | ||||
|                     ) | ||||
|                 cuda12_file_path = cuda12_path / "bin" / "nvcc" | ||||
| 
 | ||||
|             # CUDA 10, must provide compatible host compiler. | ||||
|             options += [f"-DCUDA10_TOOLKIT_ROOT_DIR:PATH={cuda10_path}"] | ||||
| 
 | ||||
|             if pathlib.Path( | ||||
|                 "/etc/rocky-release" | ||||
|             ).exists():  # We check for Rocky. Version 8 has GCC 8 in /usr/bin | ||||
|                 options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"] | ||||
|                 options += ["-DCUDA_HOST_COMPILER=/usr/bin/gcc"] | ||||
|             else: | ||||
|                 # Use new CMake option. | ||||
|                 options += [f"-DCUDA10_NVCC_EXECUTABLE:STRING={cuda10_file_path}"] | ||||
|                 options += [ | ||||
|                     "-DCUDA_HOST_COMPILER=/opt/rh/devtoolset-8/root/usr/bin/gcc" | ||||
|                 ] | ||||
| 
 | ||||
|             # CUDA 11 or 12. | ||||
|             if have_cuda12: | ||||
|                 options += [f"-DCUDA11_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"] | ||||
|                 options += [f"-DCUDA11_NVCC_EXECUTABLE:STRING={cuda11_file_path}"] | ||||
| 
 | ||||
|                 options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda12_path}"] | ||||
|                 options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda12_file_path}"] | ||||
|             else: | ||||
|                 options += [f"-DCUDA_TOOLKIT_ROOT_DIR:PATH={cuda11_path}"] | ||||
|                 options += [f"-DCUDA_NVCC_EXECUTABLE:FILEPATH={cuda11_file_path}"] | ||||
| 
 | ||||
|     else: | ||||
|         worker.utils.info("Skipping gpu compilation as requested") | ||||
| 
 | ||||
|     return options | ||||
| 
 | ||||
| 
 | ||||
| def clean_directories(builder: worker.blender.CodeBuilder) -> None: | ||||
|     worker.utils.info( | ||||
|         f"Cleaning directory [{builder.install_dir})] from the previous run" | ||||
|     ) | ||||
|     worker.utils.remove_dir(builder.install_dir) | ||||
| 
 | ||||
|     os.makedirs(builder.build_dir, exist_ok=True) | ||||
| 
 | ||||
|     worker.utils.info("Remove buildinfo files to re-generate them") | ||||
|     for build_info_file_name in ( | ||||
|         "buildinfo.h", | ||||
|         "buildinfo.h.txt", | ||||
|     ): | ||||
|         full_path = builder.build_dir / "source" / "creator" / build_info_file_name | ||||
|         if full_path.exists(): | ||||
|             worker.utils.info(f"Removing file [{full_path}]") | ||||
|             worker.utils.remove_file(full_path) | ||||
| 
 | ||||
| 
 | ||||
| def cmake_configure(builder: worker.blender.CodeBuilder) -> None: | ||||
|     cmake_cache_file_path = builder.build_dir / "CMakeCache.txt" | ||||
|     if cmake_cache_file_path.exists(): | ||||
|         worker.utils.info("Removing CMake cache") | ||||
|         worker.utils.remove_file(cmake_cache_file_path) | ||||
| 
 | ||||
|     worker.utils.info("CMake configure options") | ||||
|     cmake_options = get_cmake_options(builder) | ||||
|     cmd = ["cmake", "-S", builder.blender_dir, "-B", builder.build_dir] + list( | ||||
|         cmake_options | ||||
|     ) | ||||
|     builder.call(cmd) | ||||
| 
 | ||||
|     # NOTE: This hack does not work as expected: since the CMake cache is always | ||||
|     # updated, we end up recompiling on each compile step (code, gpu and install). | ||||
|     needs_cmake_cache_hack = False | ||||
|     if needs_cmake_cache_hack and pathlib.Path("/usr/lib64/libpthread.a").exists(): | ||||
|         # HACK: The detection for lib pthread does not work on CentOS 7 | ||||
|         worker.utils.warning(f"Hacking file [{cmake_cache_file_path}]") | ||||
|         tmp_cmake_cache_file_path = builder.build_dir / "CMakeCache.txt.tmp" | ||||
|         with open(cmake_cache_file_path) as fin, open( | ||||
|             tmp_cmake_cache_file_path, "wt" | ||||
|         ) as fout: | ||||
|             for line in fin: | ||||
|                 # worker.utils.info(line) | ||||
|                 if ( | ||||
|                     "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND" | ||||
|                     in line | ||||
|                 ): | ||||
|                     worker.utils.warning( | ||||
|                         "Replacing [OpenMP_pthread_LIBRARY-NOTFOUND] with [/usr/lib64/libpthread.a]" | ||||
|                     ) | ||||
|                     line = line.replace( | ||||
|                         "OpenMP_pthread_LIBRARY:FILEPATH=OpenMP_pthread_LIBRARY-NOTFOUND", | ||||
|                         "OpenMP_pthread_LIBRARY:FILEPATH=/usr/lib64/libpthread.a", | ||||
|                     ) | ||||
|                 fout.write(line) | ||||
|         worker.utils.warning(f"Updating [{cmake_cache_file_path}]") | ||||
|         os.replace(tmp_cmake_cache_file_path, cmake_cache_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def cmake_build(builder: worker.blender.CodeBuilder, do_install: bool) -> None: | ||||
|     if builder.track_id in ["vdev", "v430"]: | ||||
|         if builder.platform == "windows": | ||||
|             estimate_gpu_memory_in_mb = 6000 | ||||
|         else: | ||||
|             estimate_gpu_memory_in_mb = 4000 | ||||
|     else: | ||||
|         estimate_gpu_memory_in_mb = 6000 | ||||
| 
 | ||||
|     estimate_core_memory_in_mb = ( | ||||
|         estimate_gpu_memory_in_mb if builder.needs_gpu_binaries else 1000 | ||||
|     ) | ||||
|     ideal_cpu_count = fetch_ideal_cpu_count(estimate_core_memory_in_mb) | ||||
| 
 | ||||
|     # Enable verbose building to make ninja output more often. | ||||
|     # It should help with slow build commands like oneAPI, and will help with | ||||
|     # troubleshooting situations when the compile-gpu step times out. | ||||
|     needs_verbose = builder.needs_gpu_binaries | ||||
| 
 | ||||
|     build_type = get_cmake_build_type(builder) | ||||
|     cmd = ["cmake", "--build", builder.build_dir, "--config", build_type] | ||||
|     cmd += ["--parallel", f"{ideal_cpu_count}"] | ||||
|     if do_install: | ||||
|         cmd += ["--target", "install"] | ||||
| 
 | ||||
|     if needs_verbose: | ||||
|         cmd += ["--verbose"] | ||||
| 
 | ||||
|     builder.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def compile_code(builder: worker.blender.CodeBuilder) -> None: | ||||
|     builder.needs_gpu_binaries = False | ||||
|     builder.setup_build_environment() | ||||
|     clean_directories(builder) | ||||
|     cmake_configure(builder) | ||||
|     cmake_build(builder, False) | ||||
| 
 | ||||
| 
 | ||||
| def compile_gpu(builder: worker.blender.CodeBuilder) -> None: | ||||
|     if builder.platform == "darwin": | ||||
|         worker.utils.info("Compile GPU not required on macOS") | ||||
|         return | ||||
| 
 | ||||
|     builder.needs_gpu_binaries = True | ||||
|     builder.setup_build_environment() | ||||
|     cmake_configure(builder) | ||||
|     cmake_build(builder, False) | ||||
| 
 | ||||
| 
 | ||||
| def compile_install(builder: worker.blender.CodeBuilder) -> None: | ||||
|     builder.setup_build_environment() | ||||
|     cmake_configure(builder) | ||||
|     cmake_build(builder, True) | ||||
							
								
								
									
34 config/worker/blender/cpack_post.cmake Normal file
							|  | @ -0,0 +1,34 @@ | |||
| # ##### BEGIN GPL LICENSE BLOCK ##### | ||||
| # | ||||
| #  This program is free software; you can redistribute it and/or | ||||
| #  modify it under the terms of the GNU General Public License | ||||
| #  as published by the Free Software Foundation; either version 2 | ||||
| #  of the License, or (at your option) any later version. | ||||
| # | ||||
| #  This program is distributed in the hope that it will be useful, | ||||
| #  but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| #  GNU General Public License for more details. | ||||
| # | ||||
| #  You should have received a copy of the GNU General Public License | ||||
| #  along with this program; if not, write to the Free Software Foundation, | ||||
| #  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. | ||||
| # | ||||
| # ##### END GPL LICENSE BLOCK ##### | ||||
| 
 | ||||
| # This is a script which is used as POST-INSTALL one for regular CMake's | ||||
| # INSTALL target. | ||||
| # | ||||
| # It is used by buildbot workers to sign every binary which is going into | ||||
| # the final bundle. | ||||
| # | ||||
| 
 | ||||
| execute_process( | ||||
|   COMMAND python "${CMAKE_CURRENT_LIST_DIR}/cpack_post.py" "${CMAKE_INSTALL_PREFIX}" | ||||
|   WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR} | ||||
|   RESULT_VARIABLE exit_code | ||||
| ) | ||||
| 
 | ||||
| if(NOT exit_code EQUAL "0") | ||||
|     message(FATAL_ERROR "Non-zero exit code of codesign tool") | ||||
| endif() | ||||
							
								
								
									
30 config/worker/blender/cpack_post.py Normal file
							|  | @ -0,0 +1,30 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent.parent)) | ||||
| 
 | ||||
| import worker.blender.sign | ||||
| import worker.utils | ||||
| 
 | ||||
| path = pathlib.Path(sys.argv[1]).resolve() | ||||
| 
 | ||||
| worker.blender.sign.sign_windows("PROD", path) | ||||
| 
 | ||||
| if str(path).find("Unspecified") != -1: | ||||
|     print("Probably running with cpack command, adding Blender path") | ||||
|     blender_path = path.parent / "Blender" | ||||
|     worker.blender.sign.sign_windows("PROD", blender_path) | ||||
| 
 | ||||
| print("Codesign for cpack is finished") | ||||
| 
 | ||||
| # Only do this for zip | ||||
| if str(path).find("ZIP") != -1: | ||||
|     new_path = path.parent / path.name.replace("-windows64", "") | ||||
|     package_file_path = new_path.parent / (new_path.name + ".zip") | ||||
| 
 | ||||
|     worker.utils.call(["7z", "a", "-tzip", package_file_path, path, "-r"]) | ||||
|     worker.utils.call(["7z", "rn", package_file_path, path.name, new_path.name]) | ||||
							
								
								
									
45 config/worker/blender/lint.py Normal file
							|  | @ -0,0 +1,45 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| import sys | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def make_format(builder: worker.blender.CodeBuilder) -> bool: | ||||
|     os.chdir(builder.blender_dir) | ||||
| 
 | ||||
|     # Always run formatting with scripts from main, for security on unverified patches. | ||||
|     # TODO: how secure is this? How to test formatting issues in the scripts themselves? | ||||
|     # main_files = [makefile, "tools/utils_maintenance", "build_files/windows"] | ||||
|     # for main_file in main_files: | ||||
|     #     worker.utils.call(['git', 'checkout', 'origin/main', '--', main_file]) | ||||
| 
 | ||||
|     # Run format | ||||
|     if builder.platform == "windows": | ||||
|         builder.call(["make.bat", "format"]) | ||||
|     else: | ||||
|         builder.call(["make", "-f", "GNUmakefile", "format"]) | ||||
| 
 | ||||
|     # Check for changes | ||||
|     diff = worker.utils.check_output(["git", "diff"]) | ||||
|     if len(diff) > 0: | ||||
|         print(diff) | ||||
| 
 | ||||
|     # Reset | ||||
|     worker.utils.call(["git", "checkout", "HEAD", "--", "."]) | ||||
| 
 | ||||
|     if len(diff) > 0: | ||||
|         worker.utils.error('Incorrect formatting detected, run "make format" to fix') | ||||
|         return False | ||||
| 
 | ||||
|     return True | ||||
| 
 | ||||
| 
 | ||||
| def lint(builder: worker.blender.CodeBuilder) -> None: | ||||
|     ok = make_format(builder) | ||||
|     if not ok: | ||||
|         sys.exit(1) | ||||
							
								
								
									
114 config/worker/blender/msix_package.py Normal file
							|  | @ -0,0 +1,114 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import zipfile | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def pack( | ||||
|     # Version string in the form of 2.83.3.0, this is used in the Store package name | ||||
|     version: str, | ||||
|     # Input file path | ||||
|     input_file_path: pathlib.Path, | ||||
|     # A string in the form of 'CN=PUBLISHER' | ||||
|     publisher: str, | ||||
|     # If set this MSIX is for an LTS release | ||||
|     lts: bool = False, | ||||
|     # If set remove Content folder if it already exists | ||||
|     overwrite: bool = False, | ||||
|     # Don't actually execute commands | ||||
|     dry_run: bool = False, | ||||
| ) -> pathlib.Path: | ||||
|     LTSORNOT = "" | ||||
|     PACKAGETYPE = "" | ||||
|     if lts: | ||||
|         versionparts = version.split(".") | ||||
|         LTSORNOT = f" {versionparts[0]}.{versionparts[1]} LTS" | ||||
|         PACKAGETYPE = f"{versionparts[0]}.{versionparts[1]}LTS" | ||||
| 
 | ||||
|     output_package_file_name = f"{input_file_path.stem}.msix" | ||||
|     output_package_file_path = pathlib.Path(".", output_package_file_name) | ||||
|     content_folder = pathlib.Path(".", "Content") | ||||
|     content_blender_folder = pathlib.Path(content_folder, "Blender") | ||||
|     content_assets_folder = pathlib.Path(content_folder, "Assets") | ||||
|     assets_original_folder = pathlib.Path(".", "Assets") | ||||
| 
 | ||||
|     pri_config_file = pathlib.Path(".", "priconfig.xml") | ||||
|     pri_resources_file = pathlib.Path(content_folder, "resources.pri") | ||||
| 
 | ||||
|     pri_command = [ | ||||
|         "makepri", | ||||
|         "new", | ||||
|         "/pr", | ||||
|         f"{content_folder.absolute()}", | ||||
|         "/cf", | ||||
|         f"{pri_config_file.absolute()}", | ||||
|         "/of", | ||||
|         f"{pri_resources_file.absolute()}", | ||||
|     ] | ||||
| 
 | ||||
|     msix_command = [ | ||||
|         "makeappx", | ||||
|         "pack", | ||||
|         "/h", | ||||
|         "sha256", | ||||
|         "/d", | ||||
|         f"{content_folder.absolute()}", | ||||
|         "/p", | ||||
|         f"{output_package_file_path.absolute()}", | ||||
|     ] | ||||
| 
 | ||||
|     if overwrite: | ||||
|         if content_folder.joinpath("Assets").exists(): | ||||
|             worker.utils.remove_dir(content_folder) | ||||
|     content_folder.mkdir(exist_ok=True) | ||||
|     worker.utils.copy_dir(assets_original_folder, content_assets_folder) | ||||
| 
 | ||||
|     manifest_text = pathlib.Path("AppxManifest.xml.template").read_text() | ||||
|     manifest_text = manifest_text.replace("[VERSION]", version) | ||||
|     manifest_text = manifest_text.replace("[PUBLISHER]", publisher) | ||||
|     manifest_text = manifest_text.replace("[LTSORNOT]", LTSORNOT) | ||||
|     manifest_text = manifest_text.replace("[PACKAGETYPE]", PACKAGETYPE) | ||||
|     pathlib.Path(content_folder, "AppxManifest.xml").write_text(manifest_text) | ||||
| 
 | ||||
|     worker.utils.info( | ||||
|         f"Extracting files from [{input_file_path}] to [{content_blender_folder.absolute()}]" | ||||
|     ) | ||||
| 
 | ||||
|     # Extract the files from the ZIP archive, but skip the leading part of paths | ||||
|     # in the ZIP. We want to write the files to the content_blender_folder where | ||||
|     # blender.exe ends up as ./Content/Blender/blender.exe, and not | ||||
|     # ./Content/Blender/blender-2.83.3-windows64/blender.exe | ||||
|     with zipfile.ZipFile(input_file_path, "r") as blender_zip: | ||||
|         for entry in blender_zip.infolist(): | ||||
|             if entry.is_dir(): | ||||
|                 continue | ||||
|             entry_location = pathlib.Path(entry.filename) | ||||
|             target_location = content_blender_folder.joinpath(*entry_location.parts[1:]) | ||||
|             pathlib.Path(target_location.parent).mkdir(parents=True, exist_ok=True) | ||||
|             extracted_entry = blender_zip.read(entry) | ||||
|             target_location.write_bytes(extracted_entry) | ||||
| 
 | ||||
|     worker.utils.info("... extraction complete.") | ||||
| 
 | ||||
|     worker.utils.info("Generating Package Resource Index (PRI) file") | ||||
|     worker.utils.call(pri_command, dry_run=dry_run) | ||||
| 
 | ||||
|     worker.utils.info(f"Creating MSIX package using command: {' '.join(msix_command)}") | ||||
| 
 | ||||
|     # Remove MSIX file if it already exists. Otherwise the MakeAppX tool | ||||
|     # will hang. | ||||
|     worker.utils.remove_file(output_package_file_path) | ||||
|     worker.utils.call(msix_command, dry_run=dry_run) | ||||
| 
 | ||||
|     if dry_run: | ||||
|         output_package_file_path.write_text("Dry run dummy package file") | ||||
| 
 | ||||
|     worker.utils.remove_dir(content_folder) | ||||
| 
 | ||||
|     worker.utils.info("Done.") | ||||
| 
 | ||||
|     return output_package_file_path | ||||
							
								
								
									
383 config/worker/blender/pack.py Normal file
							|  | @ -0,0 +1,383 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| # Runs on buildbot worker, creating a release package using the build | ||||
| # system and zipping it into buildbot_upload.zip. This is then uploaded | ||||
| # to the master in the next buildbot step. | ||||
| 
 | ||||
| import hashlib | ||||
| import json | ||||
| import os | ||||
| import sys | ||||
| import pathlib | ||||
| import tarfile | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender.sign | ||||
| import worker.blender.bundle_dmg | ||||
| import worker.blender.version | ||||
| 
 | ||||
| 
 | ||||
| # SemVer based file naming | ||||
| def get_package_name(builder: worker.blender.CodeBuilder) -> str: | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     # For release branch we will trim redundant info | ||||
|     branch_id = ( | ||||
|         builder.branch_id.replace("/", "-") | ||||
|         .replace(".", "") | ||||
|         .replace("blender-", "") | ||||
|         .replace("-release", "") | ||||
|     ) | ||||
|     package_name = "bpy" if builder.python_module else "blender" | ||||
|     package_name += f"-{version_info.version}" | ||||
|     package_name += f"-{version_info.risk_id}" | ||||
|     package_name += f"+{branch_id}" | ||||
|     if builder.patch_id: | ||||
|         if builder.patch_id.startswith("D"): | ||||
|             package_name += f"-{builder.patch_id}" | ||||
|         else: | ||||
|             package_name += f"-PR{builder.patch_id}" | ||||
| 
 | ||||
|     package_name += f".{version_info.hash}" | ||||
|     package_name += f"-{builder.platform}" | ||||
|     package_name += f".{builder.architecture}" | ||||
|     package_name += f"-{builder.build_configuration}" | ||||
| 
 | ||||
|     return package_name | ||||
| 
 | ||||
| 
 | ||||
| # Generate .sha256 file next to package | ||||
| def generate_file_hash(package_file_path: pathlib.Path) -> None: | ||||
|     hash_algorithm = hashlib.sha256() | ||||
| 
 | ||||
|     mem_array = bytearray(128 * 1024) | ||||
|     mem_view = memoryview(mem_array) | ||||
|     with open(package_file_path, "rb", buffering=0) as f: | ||||
|         while True: | ||||
|             # https://github.com/python/typeshed/issues/2166 | ||||
|             n = f.readinto(mem_view)  # type: ignore | ||||
|             if not n: | ||||
|                 break | ||||
|             hash_algorithm.update(mem_view[:n]) | ||||
| 
 | ||||
|     hash_file_path = (package_file_path.parent) / (package_file_path.name + ".sha256") | ||||
|     hash_text = hash_algorithm.hexdigest() | ||||
|     hash_file_path.write_text(hash_text) | ||||
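|     # Note: the .sha256 file holds only the hex digest (no filename), so | ||||
|     # verify manually, e.g. by comparing against `sha256sum <package>` output. | ||||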
| 
 | ||||
|     worker.utils.info(f"Generated hash [{hash_file_path}]") | ||||
|     print(hash_text) | ||||
| 
 | ||||
| 
 | ||||
| # tar cf archive.tar test.c --owner=0 --group=0 | ||||
| def create_tar_xz(src: pathlib.Path, dest: pathlib.Path, package_name: str) -> None: | ||||
|     # Add one to strip the leading os.sep when deriving package_root from root | ||||
|     ln = len(str(src)) + 1 | ||||
|     flist = list() | ||||
| 
 | ||||
|     # Create list of tuples containing file and archive name | ||||
|     for root, dirs, files in os.walk(src): | ||||
|         package_root = os.path.join(package_name, root[ln:]) | ||||
|         flist.extend( | ||||
|             [ | ||||
|                 (os.path.join(root, file), os.path.join(package_root, file)) | ||||
|                 for file in files | ||||
|             ] | ||||
|         ) | ||||
| 
 | ||||
|     # Set UID/GID of archived files to 0, otherwise they'd be owned by whatever | ||||
|     # user compiled the package. If root then unpacks it to /usr/local/ you get | ||||
|     # a security issue. | ||||
|     def _fakeroot(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo: | ||||
|         tarinfo.gid = 0 | ||||
|         tarinfo.gname = "root" | ||||
|         tarinfo.uid = 0 | ||||
|         tarinfo.uname = "root" | ||||
|         return tarinfo | ||||
| 
 | ||||
|     # Silence false positive mypy error. | ||||
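|     # preset=6 is the default xz compression level (speed vs. size trade-off). | ||||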
|     package = tarfile.open(dest, "w:xz", preset=6)  # type: ignore[call-arg] | ||||
|     for entry in flist: | ||||
|         worker.utils.info(f"Adding [{entry[0]}] to archive [{entry[1]}]") | ||||
|         package.add(entry[0], entry[1], recursive=False, filter=_fakeroot) | ||||
|     package.close() | ||||
| 
 | ||||
| 
 | ||||
| def cleanup_files(dirpath: pathlib.Path, extension: str) -> None: | ||||
|     if dirpath.exists(): | ||||
|         for filename in os.listdir(dirpath): | ||||
|             filepath = pathlib.Path(os.path.join(dirpath, filename)) | ||||
|             if filepath.is_file() and filename.endswith(extension): | ||||
|                 worker.utils.remove_file(filepath) | ||||
| 
 | ||||
| 
 | ||||
| def pack_mac(builder: worker.blender.CodeBuilder) -> None: | ||||
|     worker.blender.version.VersionInfo(builder)  # Parse version headers early; raises if unreadable. | ||||
| 
 | ||||
|     os.chdir(builder.build_dir) | ||||
|     cleanup_files(builder.package_dir, ".dmg") | ||||
| 
 | ||||
|     package_name = get_package_name(builder) | ||||
|     package_file_name = package_name + ".dmg" | ||||
|     package_file_path = builder.package_dir / package_file_name | ||||
| 
 | ||||
|     applescript_file_path = ( | ||||
|         pathlib.Path(__file__).parent.resolve() / "blender.applescript" | ||||
|     ) | ||||
|     background_image_file_path = ( | ||||
|         builder.blender_dir / "release" / "darwin" / "background.tif" | ||||
|     ) | ||||
| 
 | ||||
|     worker.blender.bundle_dmg.bundle( | ||||
|         builder.install_dir, | ||||
|         package_file_path, | ||||
|         applescript_file_path, | ||||
|         background_image_file_path, | ||||
|     ) | ||||
| 
 | ||||
|     # Sign | ||||
|     worker.blender.sign.sign_darwin_files( | ||||
|         builder, [package_file_path], "entitlements.plist" | ||||
|     ) | ||||
| 
 | ||||
|     # Notarize | ||||
|     worker_config = builder.get_worker_config() | ||||
|     team_id = worker_config.sign_code_darwin_team_id | ||||
|     apple_id = worker_config.sign_code_darwin_apple_id | ||||
|     keychain_profile = worker_config.sign_code_darwin_keychain_profile | ||||
|     timeout = "30m" | ||||
| 
 | ||||
|     if builder.service_env_id == "LOCAL" and not apple_id: | ||||
|         worker.utils.info("Skipping notarization without Apple ID in local build") | ||||
|         return | ||||
| 
 | ||||
|     # Upload file and wait for completion. | ||||
|     notarize_cmd = [ | ||||
|         "xcrun", | ||||
|         "notarytool", | ||||
|         "submit", | ||||
|         package_file_path, | ||||
|         "--apple-id", | ||||
|         worker.utils.HiddenArgument(apple_id), | ||||
|         "--keychain-profile", | ||||
|         worker.utils.HiddenArgument(keychain_profile), | ||||
|         "--team-id", | ||||
|         worker.utils.HiddenArgument(team_id), | ||||
|         "--timeout", | ||||
|         timeout, | ||||
|         "--wait", | ||||
|         "--output-format", | ||||
|         "json", | ||||
|     ] | ||||
| 
 | ||||
|     request = worker.utils.check_output(notarize_cmd) | ||||
| 
 | ||||
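|     # Typical (abbreviated) notarytool JSON output: | ||||
|     #   {"id": "...", "status": "Accepted", "message": "..."} | ||||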
|     request_data = json.loads(request) | ||||
|     request_id = request_data["id"] | ||||
|     request_status = request_data["status"] | ||||
| 
 | ||||
|     # Show logs | ||||
|     worker.utils.call( | ||||
|         [ | ||||
|             "xcrun", | ||||
|             "notarytool", | ||||
|             "log", | ||||
|             "--keychain-profile", | ||||
|             keychain_profile, | ||||
|             request_id, | ||||
|         ], | ||||
|         retry_count=5, | ||||
|         retry_wait_time=10.0, | ||||
|     ) | ||||
| 
 | ||||
|     # Failed? | ||||
|     if request_status != "Accepted": | ||||
|         raise Exception("Notarization failed, aborting") | ||||
| 
 | ||||
|     # Staple it | ||||
|     worker.utils.call(["xcrun", "stapler", "staple", package_file_path]) | ||||
| 
 | ||||
|     generate_file_hash(package_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def pack_win(builder: worker.blender.CodeBuilder, pack_format: str) -> None: | ||||
|     os.chdir(builder.build_dir) | ||||
| 
 | ||||
|     if pack_format == "msi": | ||||
|         cpack_type = "WIX" | ||||
|     else: | ||||
|         cpack_type = "ZIP" | ||||
| 
 | ||||
|     package_extension = pack_format | ||||
|     cleanup_files(builder.package_dir, f".{package_extension}") | ||||
| 
 | ||||
|     script_folder_path = pathlib.Path(os.path.realpath(__file__)).parent | ||||
| 
 | ||||
|     # Will take care of codesigning and correct the folder name in zip | ||||
|     # | ||||
|     # Code signing is done as part of the INSTALL target, which makes it possible to | ||||
|     # sign files that end up in a bundle but come from an unsigned source (such as | ||||
|     # the precompiled SVN libraries). | ||||
|     # | ||||
|     # This is achieved by specifying cpack_post.cmake as a post-install script run | ||||
|     # by cpack. cpack_post.ps1 takes care of actual code signing. | ||||
|     post_script_file_path = script_folder_path / "cpack_post.cmake" | ||||
| 
 | ||||
|     app_id = "Blender" | ||||
|     final_package_name = get_package_name(builder) | ||||
|     # MSI needs the app ID for the Windows menu folder name; | ||||
|     # packaging fails with any other value. | ||||
|     cpack_package_name = app_id if pack_format == "msi" else final_package_name | ||||
| 
 | ||||
|     cmake_cmd = [ | ||||
|         "cmake", | ||||
|         f"-DCPACK_PACKAGE_NAME:STRING={cpack_package_name}", | ||||
|         f"-DCPACK_OVERRIDE_PACKAGENAME:STRING={cpack_package_name}", | ||||
|         # Only works with ZIP, ignored by MSI | ||||
|         # f'-DARCHIVE_FILE:STRING={package_name}', | ||||
|         # f'-DCPACK_PACKAGE_FILE_NAME:STRING={cpack_package_name}', | ||||
|         f"-DCMAKE_INSTALL_PREFIX:PATH={builder.install_dir}", | ||||
|         f"-DPOSTINSTALL_SCRIPT:PATH={post_script_file_path}", | ||||
|         ".", | ||||
|     ] | ||||
|     builder.call(cmake_cmd) | ||||
| 
 | ||||
|     worker.utils.info("Packaging Blender files") | ||||
|     cpack_cmd = [ | ||||
|         "cpack", | ||||
|         "-G", | ||||
|         cpack_type, | ||||
|         # '--verbose', | ||||
|         "--trace-expand", | ||||
|         "-C", | ||||
|         builder.build_configuration, | ||||
|         "-B", | ||||
|         str(builder.package_dir),  # CPACK_PACKAGE_DIRECTORY | ||||
|         "-P", | ||||
|         cpack_package_name, | ||||
|     ] | ||||
|     builder.call(cpack_cmd) | ||||
| 
 | ||||
|     final_package_file_name = f"{final_package_name}.{package_extension}" | ||||
|     final_package_file_path = builder.package_dir / final_package_file_name | ||||
| 
 | ||||
|     # HACK: CPack appends '-windows64' with no option to rename, so fix the file names here | ||||
|     bogus_cpack_file_path = ( | ||||
|         builder.package_dir / f"{cpack_package_name}-windows64.{package_extension}" | ||||
|     ) | ||||
| 
 | ||||
|     if pack_format == "zip": | ||||
|         if bogus_cpack_file_path.exists(): | ||||
|             worker.utils.info(f"Removing bogus file [{bogus_cpack_file_path}]") | ||||
|             worker.utils.remove_file(bogus_cpack_file_path) | ||||
| 
 | ||||
|         source_cpack_file_path = ( | ||||
|             builder.package_dir | ||||
|             / "_CPack_Packages" | ||||
|             / "Windows" | ||||
|             / "ZIP" | ||||
|             / f"{final_package_file_name}" | ||||
|         ) | ||||
|         worker.utils.info( | ||||
|             f"Moving [{source_cpack_file_path}] to [{final_package_file_path}]" | ||||
|         ) | ||||
|         os.rename(source_cpack_file_path, final_package_file_path) | ||||
|     else: | ||||
|         os.rename(bogus_cpack_file_path, final_package_file_path) | ||||
|         version_info = worker.blender.version.VersionInfo(builder) | ||||
|         description = f"Blender {version_info.version}" | ||||
|         worker.blender.sign.sign_windows_files( | ||||
|             builder.service_env_id, [final_package_file_path], description=description | ||||
|         ) | ||||
| 
 | ||||
|     generate_file_hash(final_package_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def pack_linux(builder: worker.blender.CodeBuilder) -> None: | ||||
|     blender_executable = builder.install_dir / "blender" | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     # Strip all unused symbols from the binaries | ||||
|     worker.utils.info("Stripping binaries") | ||||
|     builder.call(["strip", "--strip-all", blender_executable]) | ||||
| 
 | ||||
|     worker.utils.info("Stripping python") | ||||
| 
 | ||||
|     # The Python folder normally matches the short version (e.g. 3.0). | ||||
|     py_target = builder.install_dir / version_info.short_version | ||||
|     if not os.path.exists(py_target): | ||||
|         # Fall back to the zero-padded name (e.g. 3.00) used by older builds. | ||||
|         py_target = builder.install_dir / ( | ||||
|             "%d.%02d" % (version_info.major, version_info.minor) | ||||
|         ) | ||||
| 
 | ||||
|     worker.utils.call( | ||||
|         ["find", py_target, "-iname", "*.so", "-exec", "strip", "-s", "{}", ";"] | ||||
|     ) | ||||
| 
 | ||||
|     package_name = get_package_name(builder) | ||||
|     package_file_name = f"{package_name}.tar.xz" | ||||
|     package_file_path = builder.package_dir / package_file_name | ||||
| 
 | ||||
|     worker.utils.info(f"Creating [{package_file_path}] archive") | ||||
| 
 | ||||
|     os.makedirs(builder.package_dir, exist_ok=True) | ||||
| 
 | ||||
|     create_tar_xz(builder.install_dir, package_file_path, package_name) | ||||
| 
 | ||||
|     generate_file_hash(package_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def pack_python_module(builder: worker.blender.CodeBuilder) -> None: | ||||
|     cleanup_files(builder.package_dir, ".whl") | ||||
|     cleanup_files(builder.package_dir, ".zip") | ||||
| 
 | ||||
|     package_name = get_package_name(builder) + ".zip" | ||||
|     package_filepath = builder.package_dir / package_name | ||||
|     pack_script = builder.blender_dir / "build_files" / "utils" / "make_bpy_wheel.py" | ||||
| 
 | ||||
|     # Make wheel | ||||
|     worker.utils.info("Packaging Python Wheel") | ||||
|     cmd = [sys.executable, pack_script, builder.install_dir] | ||||
|     cmd += ["--build-dir", builder.build_dir] | ||||
|     cmd += ["--output-dir", builder.package_dir] | ||||
|     builder.call(cmd) | ||||
| 
 | ||||
|     # Pack the wheel in a zip until the pipeline and www can deal with .whl files. | ||||
| 
 | ||||
|     with zipfile.ZipFile(package_filepath, "w") as zipf: | ||||
|         for whl_name in os.listdir(builder.package_dir): | ||||
|             if whl_name.endswith(".whl"): | ||||
|                 whl_filepath = builder.package_dir / whl_name | ||||
|                 zipf.write(whl_filepath, arcname=whl_name) | ||||
| 
 | ||||
|     cleanup_files(builder.package_dir, ".whl") | ||||
| 
 | ||||
|     generate_file_hash(package_filepath) | ||||
| 
 | ||||
| 
 | ||||
| def pack(builder: worker.blender.CodeBuilder) -> None: | ||||
|     builder.setup_build_environment() | ||||
| 
 | ||||
|     # Create clean package directory | ||||
|     worker.utils.remove_dir(builder.package_dir) | ||||
|     os.makedirs(builder.package_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Make sure install directory always exists | ||||
|     os.makedirs(builder.install_dir, exist_ok=True) | ||||
| 
 | ||||
|     if builder.python_module: | ||||
|         pack_python_module(builder) | ||||
|     elif builder.platform == "darwin": | ||||
|         pack_mac(builder) | ||||
|     elif builder.platform == "windows": | ||||
|         pack_win(builder, "zip") | ||||
|         if builder.track_id not in ["vdev", "vexp"]: | ||||
|             pack_win(builder, "msi") | ||||
|     elif builder.platform == "linux": | ||||
|         pack_linux(builder) | ||||
205 config/worker/blender/sign.py Normal file
							|  | @ -0,0 +1,205 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from typing import Optional, Sequence | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def sign_windows_files( | ||||
|     service_env_id: str, | ||||
|     file_paths: Sequence[pathlib.Path], | ||||
|     description: Optional[str] = None, | ||||
|     certificate_id: str = "", | ||||
| ) -> None: | ||||
|     import conf.worker | ||||
| 
 | ||||
|     worker_config = conf.worker.get_config(service_env_id) | ||||
| 
 | ||||
|     # TODO: Rotate time servers if the first one fails. | ||||
|     # The lookup below is currently unused; it only checks that a time server is configured. | ||||
|     worker_config.sign_code_windows_time_servers[0] | ||||
|     server_url = worker_config.sign_code_windows_server_url | ||||
|     if not certificate_id: | ||||
|         certificate_id = worker_config.sign_code_windows_certificate | ||||
| 
 | ||||
|     dry_run = False | ||||
|     if service_env_id == "LOCAL" and not certificate_id: | ||||
|         worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|         dry_run = True | ||||
| 
 | ||||
|     cmd_args = [ | ||||
|         sys.executable, | ||||
|         "C:\\tools\\codesign.py", | ||||
|         "--server-url", | ||||
|         worker.utils.HiddenArgument(server_url), | ||||
|     ] | ||||
|     if description: | ||||
|         cmd_args += ["--description", description] | ||||
| 
 | ||||
|     cmd: worker.utils.CmdSequence = cmd_args | ||||
| 
 | ||||
|     # Signing one file at a time causes a stampede on servers, resulting in blocking. | ||||
|     # Instead sign in chunks of multiple files. | ||||
|     chunk_size = 25  # Sign how many files at a time | ||||
|     retry_count = 3 | ||||
| 
 | ||||
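|     # e.g. 60 files with chunk_size 25 results in 3 signing calls. | ||||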
|     for i in range(0, len(file_paths), chunk_size): | ||||
|         file_chunks = file_paths[i : i + chunk_size] | ||||
|         worker.utils.call( | ||||
|             list(cmd) + list(file_chunks), retry_count=retry_count, dry_run=dry_run | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def sign_windows(service_env_id: str, install_path: pathlib.Path) -> None: | ||||
|     # TODO: Why use a junction? Is there some failure with long file paths? | ||||
|     # worker.utils.info("Creating building link") | ||||
|     # temp_build_root_path = pathlib.Path("C:/BlenderTemp") | ||||
|     # os.makedirs(temp_build_root_path, exist_ok=True) | ||||
|     # orig_install_path = install_path | ||||
|     # install_path = temp_build_root_path / install_path.name | ||||
| 
 | ||||
|     try: | ||||
|         # TODO | ||||
|         # New-Item -type Junction -path install_path -value orig_install_path | ||||
| 
 | ||||
|         worker.utils.info("Collecting files to process") | ||||
|         file_paths = list(install_path.glob("*.exe")) | ||||
|         file_paths += list(install_path.glob("*.dll")) | ||||
|         file_paths += list(install_path.glob("*.pyd")) | ||||
|         file_paths = [f for f in file_paths if "blender.crt" not in str(f)] | ||||
|         for f in file_paths: | ||||
|             print(f) | ||||
| 
 | ||||
|         sign_windows_files(service_env_id, file_paths) | ||||
|     finally: | ||||
|         # worker.utils.info(f"Removing temporary folder {temp_build_root_path}") | ||||
|         # worker.utils.remove_dir(temp_build_root_path, retry_count=5, retry_wait_time=5.0) | ||||
| 
 | ||||
|         # TODO: is this really necessary? | ||||
|         # worker.utils.info("Flushing volume cache...") | ||||
|         # Write-VolumeCache -DriveLetter C | ||||
| 
 | ||||
|         # core_shell_retry_command -retry_count 5 -delay_in_milliseconds 1000 -script_block ` | ||||
|         #    worker.utils.info("Junction information...") | ||||
|         #    junction = Get-Item -Path install_path | ||||
|         #    worker.utils.info(junction | Format-Table) | ||||
|         #    worker.utils.info("Attempting to remove...") | ||||
|         #    junction.Delete() | ||||
|         #    worker.utils.info("Junction deleted!") | ||||
|         pass | ||||
| 
 | ||||
|     worker.utils.info("End of codesign steps") | ||||
| 
 | ||||
| 
 | ||||
| def sign_darwin_files( | ||||
|     builder: worker.blender.CodeBuilder, | ||||
|     file_paths: Sequence[pathlib.Path], | ||||
|     entitlements_file_name: str, | ||||
| ) -> None: | ||||
|     entitlements_path = ( | ||||
|         builder.code_path / "release" / "darwin" / entitlements_file_name | ||||
|     ) | ||||
| 
 | ||||
|     if not entitlements_path.exists(): | ||||
|         raise Exception(f"File {entitlements_path} not found, aborting") | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
|     certificate_id = worker_config.sign_code_darwin_certificate | ||||
| 
 | ||||
|     dry_run = False | ||||
|     if builder.service_env_id == "LOCAL" and not certificate_id: | ||||
|         worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|         dry_run = True | ||||
| 
 | ||||
|     keychain_password = worker_config.darwin_keychain_password(builder.service_env_id) | ||||
|     cmd: worker.utils.CmdSequence = [ | ||||
|         "security", | ||||
|         "unlock-keychain", | ||||
|         "-p", | ||||
|         worker.utils.HiddenArgument(keychain_password), | ||||
|     ] | ||||
|     worker.utils.call(cmd, dry_run=dry_run) | ||||
| 
 | ||||
|     for file_path in file_paths: | ||||
|         if file_path.is_dir() and file_path.suffix != ".app": | ||||
|             continue | ||||
| 
 | ||||
|         # Remove signature | ||||
|         if file_path.suffix != ".dmg": | ||||
|             worker.utils.call( | ||||
|                 ["codesign", "--remove-signature", file_path], | ||||
|                 exit_on_error=False, | ||||
|                 dry_run=dry_run, | ||||
|             ) | ||||
| 
 | ||||
|         # Add signature | ||||
|         worker.utils.call( | ||||
|             [ | ||||
|                 "codesign", | ||||
|                 "--force", | ||||
|                 "--timestamp", | ||||
|                 "--options", | ||||
|                 "runtime", | ||||
|                 f"--entitlements={entitlements_path}", | ||||
|                 "--sign", | ||||
|                 certificate_id, | ||||
|                 file_path, | ||||
|             ], | ||||
|             retry_count=3, | ||||
|             dry_run=dry_run, | ||||
|         ) | ||||
|         if file_path.suffix == ".app": | ||||
|             worker.utils.info(f"Vaildating app bundle {file_path}") | ||||
|             worker.utils.call( | ||||
|                 ["codesign", "-vvv", "--deep", "--strict", file_path], dry_run=dry_run | ||||
|             ) | ||||
| 
 | ||||
| 
 | ||||
| def sign_darwin(builder: worker.blender.CodeBuilder) -> None: | ||||
|     bundle_path = builder.install_dir / "Blender.app" | ||||
| 
 | ||||
|     # Executables | ||||
|     sign_path = bundle_path / "Contents" / "MacOS" | ||||
|     worker.utils.info(f"Collecting files to process in {sign_path}") | ||||
|     sign_darwin_files(builder, list(sign_path.rglob("*")), "entitlements.plist") | ||||
| 
 | ||||
|     # Thumbnailer app extension. | ||||
|     thumbnailer_appex_path = ( | ||||
|         bundle_path / "Contents" / "PlugIns" / "blender-thumbnailer.appex" | ||||
|     ) | ||||
|     if thumbnailer_appex_path.exists(): | ||||
|         sign_path = thumbnailer_appex_path / "Contents" / "MacOS" | ||||
|         worker.utils.info(f"Collecting files to process in {sign_path}") | ||||
|         sign_darwin_files( | ||||
|             builder, list(sign_path.rglob("*")), "thumbnailer_entitlements.plist" | ||||
|         ) | ||||
| 
 | ||||
|     # Shared libraries and Python | ||||
|     sign_path = bundle_path / "Contents" / "Resources" | ||||
|     worker.utils.info(f"Collecting files to process in {sign_path}") | ||||
|     file_paths = list( | ||||
|         set(sign_path.rglob("*.dylib")) | ||||
|         | set(sign_path.rglob("*.so")) | ||||
|         | set(sign_path.rglob("python3.*")) | ||||
|     ) | ||||
|     sign_darwin_files(builder, file_paths, "entitlements.plist") | ||||
| 
 | ||||
|     # Bundle | ||||
|     worker.utils.info(f"Signing app bundle {bundle_path}") | ||||
|     sign_darwin_files(builder, [bundle_path], "entitlements.plist") | ||||
| 
 | ||||
| 
 | ||||
| def sign(builder: worker.blender.CodeBuilder) -> None: | ||||
|     builder.setup_build_environment() | ||||
| 
 | ||||
|     if builder.platform == "windows": | ||||
|         sign_windows(builder.service_env_id, builder.install_dir) | ||||
|     elif builder.platform == "darwin": | ||||
|         sign_darwin(builder) | ||||
|     else: | ||||
|         worker.utils.info("No code signing to be done on this platform") | ||||
62 config/worker/blender/test.py Normal file
							|  | @ -0,0 +1,62 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| import shutil | ||||
| 
 | ||||
| from typing import List | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.pack | ||||
| import worker.blender.compile | ||||
| 
 | ||||
| 
 | ||||
| def get_ctest_arguments(builder: worker.blender.CodeBuilder) -> List[str]: | ||||
|     args = ["--output-on-failure"] | ||||
| 
 | ||||
|     # GPU tests are currently slow and can cause timeouts. | ||||
|     if not builder.needs_gpu_tests: | ||||
|         args += ["--parallel", "4"] | ||||
| 
 | ||||
|     args += ["-C", worker.blender.compile.get_cmake_build_type(builder)] | ||||
|     return args | ||||
| 
 | ||||
| 
 | ||||
| def package_for_upload(builder: worker.blender.CodeBuilder, success: bool) -> None: | ||||
|     build_tests_dir = builder.build_dir / "tests" | ||||
|     package_tests_dir = builder.package_dir / "tests" | ||||
|     if not build_tests_dir.exists(): | ||||
|         return | ||||
| 
 | ||||
|     os.makedirs(package_tests_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Upload package on failure | ||||
|     if not success: | ||||
|         package_filename = "tests-" + worker.blender.pack.get_package_name(builder) | ||||
|         package_filepath = package_tests_dir / package_filename | ||||
|         shutil.copytree(build_tests_dir, package_filepath) | ||||
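|         # make_archive() writes <package_filepath>.zip from the copied folder, | ||||
|         # after which the unpacked copy is no longer needed. | ||||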
|         shutil.make_archive( | ||||
|             str(package_filepath), "zip", package_tests_dir, package_filename | ||||
|         ) | ||||
|         shutil.rmtree(package_filepath) | ||||
| 
 | ||||
|     # Always upload unpacked folder for main and release tracks, | ||||
|     # when using GPU tests. This is useful for debugging GPU | ||||
|     # differences. | ||||
|     if builder.track_id != "vexp" and builder.needs_gpu_tests: | ||||
|         branch = builder.branch_id.replace("blender-", "").replace("-release", "") | ||||
|         name = f"{branch}-{builder.platform}-{builder.architecture}" | ||||
|         shutil.copytree(build_tests_dir, package_tests_dir / name) | ||||
| 
 | ||||
| 
 | ||||
| def test(builder: worker.blender.CodeBuilder) -> None: | ||||
|     builder.setup_build_environment() | ||||
|     os.chdir(builder.build_dir) | ||||
|     success = False | ||||
| 
 | ||||
|     try: | ||||
|         builder.call(["ctest"] + get_ctest_arguments(builder)) | ||||
|         success = True | ||||
|     finally: | ||||
|         package_for_upload(builder, success) | ||||
57 config/worker/blender/update.py Normal file
							|  | @ -0,0 +1,57 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| import sys | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def _clean_folders(builder: worker.blender.CodeBuilder) -> None: | ||||
|     # Delete build folders. | ||||
|     if builder.needs_full_clean: | ||||
|         worker.utils.remove_dir(builder.build_dir) | ||||
|     else: | ||||
|         worker.utils.remove_dir(builder.build_dir / "Testing") | ||||
|         worker.utils.remove_dir(builder.build_dir / "bin" / "tests") | ||||
| 
 | ||||
|     # Delete install and packaging folders | ||||
|     worker.utils.remove_dir(builder.install_dir) | ||||
|     worker.utils.remove_dir(builder.package_dir) | ||||
| 
 | ||||
| 
 | ||||
| def update(builder: worker.blender.CodeBuilder) -> None: | ||||
|     _clean_folders(builder) | ||||
| 
 | ||||
|     builder.update_source() | ||||
|     os.chdir(builder.code_path) | ||||
| 
 | ||||
|     make_update_path = builder.code_path / "build_files" / "utils" / "make_update.py" | ||||
| 
 | ||||
|     make_update_text = make_update_path.read_text() | ||||
|     if "def svn_update" in make_update_text: | ||||
|         worker.utils.error( | ||||
|             "Can't build branch or pull request that uses Subversion libraries." | ||||
|         ) | ||||
|         worker.utils.error( | ||||
|             "Merge with latest main or release branch to use Git LFS libraries." | ||||
|         ) | ||||
|         sys.exit(1) | ||||
| 
 | ||||
|     # Run make update | ||||
|     cmd = [ | ||||
|         sys.executable, | ||||
|         make_update_path, | ||||
|         "--no-blender", | ||||
|         "--use-linux-libraries", | ||||
|         "--use-tests", | ||||
|         "--architecture", | ||||
|         builder.architecture, | ||||
|     ] | ||||
| 
 | ||||
|     if builder.track_id not in ("v360", "vexp"): | ||||
|         cmd += ["--prune-destructive"] | ||||
| 
 | ||||
|     worker.utils.call(cmd) | ||||
60 config/worker/blender/version.py Normal file
							|  | @ -0,0 +1,60 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import re | ||||
| 
 | ||||
| 
 | ||||
| import worker.blender | ||||
| 
 | ||||
| 
 | ||||
| class VersionInfo: | ||||
|     def __init__(self, builder: worker.blender.CodeBuilder): | ||||
|         # Get version information | ||||
|         buildinfo_h = builder.build_dir / "source" / "creator" / "buildinfo.h" | ||||
|         blender_h = ( | ||||
|             builder.blender_dir | ||||
|             / "source" | ||||
|             / "blender" | ||||
|             / "blenkernel" | ||||
|             / "BKE_blender_version.h" | ||||
|         ) | ||||
| 
 | ||||
|         version_number = int(self._parse_header_file(blender_h, "BLENDER_VERSION")) | ||||
| 
 | ||||
|         version_number_patch = int( | ||||
|             self._parse_header_file(blender_h, "BLENDER_VERSION_PATCH") | ||||
|         ) | ||||
|         self.major, self.minor, self.patch = ( | ||||
|             version_number // 100, | ||||
|             version_number % 100, | ||||
|             version_number_patch, | ||||
|         ) | ||||
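|         # e.g. BLENDER_VERSION 402 gives major 4, minor 2; patch comes from | ||||
|         # BLENDER_VERSION_PATCH. | ||||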
| 
 | ||||
|         if self.major >= 3: | ||||
|             self.short_version = "%d.%d" % (self.major, self.minor) | ||||
|             self.version = "%d.%d.%d" % (self.major, self.minor, self.patch) | ||||
|         else: | ||||
|             self.short_version = "%d.%02d" % (self.major, self.minor) | ||||
|             self.version = "%d.%02d.%d" % (self.major, self.minor, self.patch) | ||||
| 
 | ||||
|         self.version_cycle = self._parse_header_file(blender_h, "BLENDER_VERSION_CYCLE") | ||||
|         if buildinfo_h.exists(): | ||||
|             self.hash = self._parse_header_file(buildinfo_h, "BUILD_HASH")[1:-1] | ||||
|         else: | ||||
|             self.hash = "" | ||||
|         self.risk_id = self.version_cycle.replace("release", "stable").replace( | ||||
|             "rc", "candidate" | ||||
|         ) | ||||
|         self.is_development_build = self.version_cycle == "alpha" | ||||
| 
 | ||||
|     def _parse_header_file(self, filename: pathlib.Path, define: str) -> str: | ||||
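|         # Matches lines like: #define BLENDER_VERSION 402 | ||||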
|         regex = re.compile(r"^#\s*define\s+%s\s+(.*)" % define) | ||||
|         with open(filename, "r") as file: | ||||
|             for line in file: | ||||
|                 match = regex.match(line) | ||||
|                 if match: | ||||
|                     return match.group(1) | ||||
| 
 | ||||
|         raise Exception(f"Failed to parse {filename.name} header for {define}") | ||||
42 config/worker/code.py Executable file
							|  | @ -0,0 +1,42 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender.update | ||||
| import worker.blender.lint | ||||
| import worker.blender.compile | ||||
| import worker.blender.test | ||||
| import worker.blender.sign | ||||
| import worker.blender.pack | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["lint-code"] = worker.blender.lint.lint | ||||
|     steps["compile-code"] = worker.blender.compile.compile_code | ||||
|     steps["compile-gpu"] = worker.blender.compile.compile_gpu | ||||
|     steps["compile-install"] = worker.blender.compile.compile_install | ||||
|     steps["test-code"] = worker.blender.test.test | ||||
|     steps["sign-code-binaries"] = worker.blender.sign.sign | ||||
|     steps["package-code-binaries"] = worker.blender.pack.pack | ||||
|     steps["clean"] = worker.blender.CodeBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = worker.blender.CodeBuilder(args) | ||||
|     builder.setup_track_path() | ||||
|     builder.run(args.step, steps) | ||||
43 config/worker/code_benchmark.py Executable file
							|  | @ -0,0 +1,43 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.benchmark | ||||
| import worker.blender.compile | ||||
| import worker.blender.update | ||||
| 
 | ||||
| 
 | ||||
| class BenchmarkBuilder(worker.blender.CodeBuilder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args) | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["compile-code"] = worker.blender.compile.compile_code | ||||
|     steps["compile-gpu"] = worker.blender.compile.compile_gpu | ||||
|     steps["compile-install"] = worker.blender.compile.compile_install | ||||
|     steps["benchmark"] = worker.blender.benchmark.benchmark | ||||
|     steps["clean"] = worker.blender.CodeBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = BenchmarkBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
35 config/worker/code_bpy_deploy.py Executable file
							|  | @ -0,0 +1,35 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.update | ||||
| 
 | ||||
| import worker.deploy | ||||
| import worker.deploy.pypi | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["pull"] = worker.deploy.pypi.pull | ||||
|     steps["deliver-pypi"] = worker.deploy.pypi.deliver | ||||
|     steps["clean"] = worker.deploy.CodeDeployBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = worker.deploy.CodeDeployBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
40 config/worker/code_deploy.py Executable file
							|  | @ -0,0 +1,40 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.update | ||||
| 
 | ||||
| import worker.deploy | ||||
| import worker.deploy.source | ||||
| import worker.deploy.artifacts | ||||
| import worker.deploy.monitor | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["pull-artifacts"] = worker.deploy.artifacts.pull | ||||
|     steps["repackage-artifacts"] = worker.deploy.artifacts.repackage | ||||
|     steps["package-source"] = worker.deploy.source.package | ||||
|     steps["deploy-artifacts"] = worker.deploy.artifacts.deploy | ||||
|     steps["monitor-artifacts"] = worker.deploy.monitor.monitor | ||||
|     steps["clean"] = worker.deploy.CodeDeployBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = worker.deploy.CodeDeployBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
61 config/worker/code_store.py Executable file
							|  | @ -0,0 +1,61 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.update | ||||
| 
 | ||||
| import worker.deploy | ||||
| import worker.deploy.artifacts | ||||
| import worker.deploy.snap | ||||
| import worker.deploy.steam | ||||
| import worker.deploy.windows | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     if builder.store_id == "snap": | ||||
|         worker.deploy.snap.package(builder) | ||||
|     elif builder.store_id == "steam": | ||||
|         worker.deploy.steam.package(builder) | ||||
|     elif builder.store_id == "windows": | ||||
|         builder.setup_build_environment() | ||||
|         worker.deploy.windows.package(builder) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     if builder.store_id == "snap": | ||||
|         worker.deploy.snap.deliver(builder) | ||||
|     elif builder.store_id == "steam": | ||||
|         worker.deploy.steam.deliver(builder) | ||||
|     elif builder.store_id == "windows": | ||||
|         worker.deploy.windows.deliver(builder) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["pull-artifacts"] = worker.deploy.artifacts.pull | ||||
|     steps["package"] = package | ||||
|     steps["deliver"] = deliver | ||||
|     steps["clean"] = worker.deploy.CodeDeployBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
|     parser.add_argument( | ||||
|         "--store-id", type=str, choices=["snap", "steam", "windows"], required=True | ||||
|     ) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = worker.deploy.CodeStoreBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
208 config/worker/configure.py Normal file
							|  | @ -0,0 +1,208 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| import pathlib | ||||
| import platform | ||||
| import psutil | ||||
| import shutil | ||||
| 
 | ||||
| from typing import List, Tuple | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def get_os_release() -> str: | ||||
|     if platform.system() == "Darwin": | ||||
|         return "macOS " + platform.mac_ver()[0] | ||||
|     else: | ||||
|         return platform.version() | ||||
| 
 | ||||
| 
 | ||||
| def get_cpu_info() -> str: | ||||
|     if platform.system() == "Darwin": | ||||
|         return worker.utils.check_output( | ||||
|             ["/usr/sbin/sysctl", "-n", "machdep.cpu.brand_string"] | ||||
|         ) | ||||
|     elif platform.system() == "Linux": | ||||
|         cpuinfo = pathlib.Path("/proc/cpuinfo").read_text() | ||||
|         for line in cpuinfo.splitlines(): | ||||
|             if line.find("model name") != -1: | ||||
|                 return line.split(":")[1].strip() | ||||
| 
 | ||||
|     return platform.processor() | ||||
| 
 | ||||
| 
 | ||||
| def disk_free_in_gb(builder: worker.utils.Builder) -> float: | ||||
|     _, _, disk_free = shutil.disk_usage(builder.track_path) | ||||
|     return disk_free / (1024.0**3) | ||||
| 
 | ||||
| 
 | ||||
| def get_thread_count(thread_memory_in_GB: float) -> int: | ||||
|     num_threads = psutil.cpu_count() | ||||
|     memory_in_GB = psutil.virtual_memory().total / (1024**3) | ||||
| 
 | ||||
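|     # e.g. with 64 GB of RAM and thread_memory_in_GB=4.0, at most 16 threads | ||||
|     # are used (fewer if the CPU has fewer logical cores). | ||||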
|     return min(int(memory_in_GB / thread_memory_in_GB), num_threads) | ||||
| 
 | ||||
| 
 | ||||
| def clean(builder: worker.utils.Builder) -> None: | ||||
|     # Remove build folders to make space. | ||||
|     delete_paths: List[pathlib.Path] = [] | ||||
|     optional_delete_paths: List[pathlib.Path] = [] | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
|     tracks = branches_config.track_major_minor_versions.keys() | ||||
| 
 | ||||
|     # TODO: don't hardcode these folder and track names | ||||
|     for track in tracks: | ||||
|         track_path = builder.tracks_root_path / ("blender-manual-" + track) | ||||
|         optional_delete_paths += [track_path / "build"] | ||||
| 
 | ||||
|     for track in tracks: | ||||
|         track_path = builder.tracks_root_path / ("blender-" + track) | ||||
|         delete_paths += [track_path / "build_download"] | ||||
|         delete_paths += [track_path / "build_linux"] | ||||
|         delete_paths += [track_path / "build_darwin"] | ||||
|         delete_paths += [track_path / "build_package"] | ||||
|         delete_paths += [track_path / "build_source"] | ||||
|         delete_paths += [track_path / "build_debug"] | ||||
|         delete_paths += [track_path / "build_arm64_debug"] | ||||
|         delete_paths += [track_path / "build_x86_64_debug"] | ||||
|         delete_paths += [track_path / "build_sanitizer"] | ||||
|         delete_paths += [track_path / "build_arm64_sanitizer"] | ||||
|         delete_paths += [track_path / "build_x86_64_sanitizer"] | ||||
|         delete_paths += [track_path / "install_release"] | ||||
|         delete_paths += [track_path / "install_asserts"] | ||||
|         delete_paths += [track_path / "install_sanitizer"] | ||||
|         delete_paths += [track_path / "install_debug"] | ||||
|         delete_paths += [track_path / "benchmark"] | ||||
|         optional_delete_paths += [track_path / "build_release"] | ||||
|         optional_delete_paths += [track_path / "build_arm64_release"] | ||||
|         optional_delete_paths += [track_path / "build_x86_64_release"] | ||||
|         optional_delete_paths += [track_path / "build_asserts"] | ||||
|         optional_delete_paths += [track_path / "build_arm64_asserts"] | ||||
|         optional_delete_paths += [track_path / "build_x86_64_asserts"] | ||||
| 
 | ||||
|     for delete_path in delete_paths: | ||||
|         worker.utils.remove_dir(delete_path) | ||||
| 
 | ||||
|     # Cached build folders only if we are low on disk space | ||||
|     if builder.platform == "darwin": | ||||
|         # On macOS APFS free-space reporting is unreliable because space is | ||||
|         # reclaimed on demand, but this threshold should still be adequate. | ||||
|         required_space_gb = 12.0 | ||||
|     else: | ||||
|         required_space_gb = 25.0 | ||||
| 
 | ||||
|     free_space_gb = disk_free_in_gb(builder) | ||||
|     if free_space_gb < required_space_gb: | ||||
|         worker.utils.warning( | ||||
|             f"Trying to delete cached builds for disk space (free {free_space_gb:.2f} GB)" | ||||
|         ) | ||||
|         sorted_paths: List[Tuple[float, pathlib.Path]] = [] | ||||
|         for delete_path in optional_delete_paths: | ||||
|             try: | ||||
|                 sorted_paths.append((os.path.getmtime(delete_path), delete_path)) | ||||
|             except (FileNotFoundError, PermissionError) as e: | ||||
|                 worker.utils.warning(f"Unable to access {delete_path}: {e}") | ||||
| 
 | ||||
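|         # Delete the oldest cached builds first (sorted by modification time). | ||||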
|         for _, delete_path in sorted(sorted_paths): | ||||
|             worker.utils.remove_dir(delete_path) | ||||
|             if disk_free_in_gb(builder) >= required_space_gb: | ||||
|                 break | ||||
| 
 | ||||
|     # Might be left over from git command hanging | ||||
|     stack_dump_file_path = builder.code_path / "sh.exe.stackdump" | ||||
|     worker.utils.remove_file(stack_dump_file_path) | ||||
| 
 | ||||
| 
 | ||||
| def configure_machine(builder: worker.utils.Builder) -> None: | ||||
|     worker_config = builder.get_worker_config() | ||||
| 
 | ||||
|     clean(builder) | ||||
| 
 | ||||
|     # Print system information. | ||||
|     processor = get_cpu_info() | ||||
| 
 | ||||
|     worker.utils.info("System information") | ||||
|     print(f"System: {platform.system()}") | ||||
|     print(f"Release: {get_os_release()}") | ||||
|     print(f"Version: {platform.version()}") | ||||
|     print(f"Processor: {processor}") | ||||
|     print( | ||||
|         f"Cores: {psutil.cpu_count()} logical, {psutil.cpu_count(logical=False)} physical" | ||||
|     ) | ||||
|     print(f"Total Memory: {psutil.virtual_memory().total / (1024**3):.2f} GB") | ||||
|     print(f"Available Memory: {psutil.virtual_memory().available / (1024**3):.2f} GB") | ||||
| 
 | ||||
|     disk_total, disk_used, disk_free = shutil.disk_usage(builder.track_path) | ||||
|     print( | ||||
|         f"Disk: total {disk_total / (1024**3):.2f} GB, " | ||||
|         f"used {disk_used / (1024**3):.2f} GB, " | ||||
|         f"free {disk_free / (1024**3):.2f} GB" | ||||
|     ) | ||||
| 
 | ||||
|     # Check dependencies and provision | ||||
|     worker.utils.info("Checking installable software cache") | ||||
|     available_software_artifacts = worker_config.software_cache_path.glob("*/*") | ||||
|     for artifact in available_software_artifacts: | ||||
|         print(artifact) | ||||
| 
 | ||||
|     # Check packages | ||||
|     if builder.platform == "linux": | ||||
|         etc_rocky = pathlib.Path("/etc/rocky-release") | ||||
| 
 | ||||
|         if etc_rocky.exists(): | ||||
|             worker.utils.call(["yum", "updateinfo"]) | ||||
|             worker.utils.call(["yum", "list", "updates"]) | ||||
|         else: | ||||
|             worker.utils.call(["apt", "list", "--upgradable"]) | ||||
| 
 | ||||
|     elif builder.platform == "windows": | ||||
|         choco_version_str = worker.utils.check_output(["choco", "--version"]) | ||||
|         choco_version = [int(x) for x in choco_version_str.split(".")] | ||||
|         if choco_version[0] >= 2: | ||||
|             # In the newer Chocolatey versions `choco list` behavior got changed | ||||
|             # to only list installed package, and the --localonly flag has been | ||||
|             # removed. | ||||
|             worker.utils.call(["choco", "list"]) | ||||
|         else: | ||||
|             worker.utils.call(["choco", "list", "--lo"]) | ||||
|         worker.utils.call(["choco", "outdated"]) | ||||
| 
 | ||||
|         # Not an actual command, disabled for now. | ||||
|         # worker.utils.call(["scoop", "list"]) | ||||
|         # worker.utils.call(["scoop", "status"]) | ||||
| 
 | ||||
|     elif builder.platform == "darwin": | ||||
|         worker.utils.call(["brew", "update"]) | ||||
|         worker.utils.call(["brew", "outdated", "--cask"]) | ||||
|         worker.utils.call(["xcrun", "--show-sdk-path"]) | ||||
| 
 | ||||
|     # XXX Windows builder debug code | ||||
|     if builder.platform == "windows": | ||||
|         # Ensure the idiff.exe process is stopped. | ||||
|         # It might be left hanging from a previously failed build, which would | ||||
|         # prevent removal of the install directory for the new build (due to | ||||
|         # held-open DLLs). | ||||
|         worker.utils.info("Stopping idiff.exe if running") | ||||
| 
 | ||||
|         dump_folder = pathlib.Path("C:\\tmp\\dump\\") | ||||
|         os.makedirs(dump_folder, exist_ok=True) | ||||
| 
 | ||||
|         worker.utils.call(["procdump", "idiff.exe", dump_folder], exit_on_error=False) | ||||
| 
 | ||||
|         for proc in psutil.process_iter(): | ||||
|             if proc.name() == "idiff.exe": | ||||
|                 proc.kill() | ||||
| 
 | ||||
|     for proc in psutil.process_iter(): | ||||
|         if proc.name().lower() in [ | ||||
|             "blender", | ||||
|             "blender.exe", | ||||
|             "blender_test", | ||||
|             "blender_test.exe", | ||||
|         ]: | ||||
|             worker.utils.warning("Killing stray Blender process") | ||||
|             proc.kill() | ||||
41 config/worker/deploy/__init__.py Normal file
							|  | @ -0,0 +1,41 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import pathlib | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| class CodeDeployBuilder(worker.blender.CodeBuilder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args) | ||||
|         self.platform_ids = ["linux", "darwin", "windows"] | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
|         track_path: pathlib.Path = self.track_path | ||||
| 
 | ||||
|         self.download_dir = track_path / "build_download" | ||||
|         self.package_source_dir = track_path / "build_source" | ||||
|         self.store_steam_dir = track_path / "build_store_steam" | ||||
|         self.store_snap_dir = track_path / "build_store_snap" | ||||
|         self.store_windows_dir = track_path / "build_store_windows" | ||||
| 
 | ||||
|     def clean(self): | ||||
|         worker.utils.remove_dir(self.download_dir) | ||||
|         worker.utils.remove_dir(self.package_dir) | ||||
|         worker.utils.remove_dir(self.package_source_dir) | ||||
|         worker.utils.remove_dir(self.store_steam_dir) | ||||
|         worker.utils.remove_dir(self.store_snap_dir) | ||||
|         worker.utils.remove_dir(self.store_windows_dir) | ||||
|         # Created by make source_archive_complete | ||||
|         worker.utils.remove_dir(self.track_path / "build_linux") | ||||
|         worker.utils.remove_dir(self.track_path / "build_darwin") | ||||
| 
 | ||||
| 
 | ||||
| class CodeStoreBuilder(CodeDeployBuilder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args) | ||||
|         self.store_id = args.store_id | ||||
280 config/worker/deploy/artifacts.py Normal file
							|  | @ -0,0 +1,280 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| import pathlib | ||||
| import urllib.request | ||||
| 
 | ||||
| from typing import Any, Dict | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.version | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| checksums = ["md5", "sha256"] | ||||
| 
 | ||||
| 
 | ||||
| def pull(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     pipeline_category = "daily" | ||||
|     if builder.track_id == "vexp": | ||||
|         pipeline_category = "experimental" | ||||
| 
 | ||||
|     log_path = builder.track_path / "log" | ||||
|     worker.utils.remove_dir(log_path) | ||||
|     os.makedirs(log_path, exist_ok=True) | ||||
| 
 | ||||
|     worker.utils.info("Cleaning package directory") | ||||
|     worker.utils.remove_dir(builder.package_dir) | ||||
|     os.makedirs(builder.package_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Fetch builds information. | ||||
|     env_base_url = { | ||||
|         "LOCAL": "https://builder.blender.org", | ||||
|         "UATEST": "https://builder.uatest.blender.org", | ||||
|         "PROD": "https://builder.blender.org", | ||||
|     } | ||||
|     base_url = env_base_url[builder.service_env_id] | ||||
| 
 | ||||
|     search_url = f"{base_url}/download/{pipeline_category}?format=json&v=1" | ||||
| 
 | ||||
|     worker.utils.info(f"Fetching build JSON from [{search_url}]") | ||||
| 
 | ||||
|     builds_response = urllib.request.urlopen(search_url) | ||||
|     # TODO -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count | ||||
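|     # Hedged sketch, not part of this change: the TODO above could be | ||||
|     # implemented roughly as below (would also need "import time"): | ||||
|     # | ||||
|     #   for _attempt in range(3):  # hypothetical retry count | ||||
|     #       try: | ||||
|     #           builds_response = urllib.request.urlopen(search_url, timeout=60) | ||||
|     #           break | ||||
|     #       except urllib.error.URLError: | ||||
|     #           time.sleep(30)  # hypothetical retry interval | ||||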
|     builds_json = json.load(builds_response) | ||||
| 
 | ||||
|     # Get builds matching our version. | ||||
|     worker.utils.info("Processing build JSON") | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     unique_builds: Dict[Any, Dict[Any, Any]] = {} | ||||
|     for build in builds_json: | ||||
|         if build["version"] != version_info.version: | ||||
|             continue | ||||
|         if build["file_extension"] in checksums: | ||||
|             continue | ||||
| 
 | ||||
|         # Correct incomplete file extension in JSON. | ||||
|         if build["file_name"].endswith(".tar.xz"): | ||||
|             build["file_extension"] = "tar.xz" | ||||
|         elif build["file_name"].endswith(".tar.gz"): | ||||
|             build["file_extension"] = "tar.gz" | ||||
|         elif build["file_name"].endswith(".tar.bz2"): | ||||
|             build["file_extension"] = "tar.bz2" | ||||
| 
 | ||||
|         key = (build["platform"], build["architecture"], build["file_extension"]) | ||||
|         if key in unique_builds: | ||||
|             # Prefer more stable builds, to avoid issue when multiple are present. | ||||
|             risk_id_order = ["stable", "candidate", "rc", "beta", "alpha", "edge"] | ||||
|             risk = build["risk_id"] | ||||
|             risk = ( | ||||
|                 risk_id_order.index(risk) | ||||
|                 if risk in risk_id_order | ||||
|                 else len(risk_id_order) | ||||
|             ) | ||||
|             other_risk = unique_builds[key]["risk_id"] | ||||
|             other_risk = ( | ||||
|                 risk_id_order.index(other_risk) | ||||
|                 if other_risk in risk_id_order | ||||
|                 else len(risk_id_order) | ||||
|             ) | ||||
|             if other_risk <= risk: | ||||
|                 continue | ||||
|         else: | ||||
|             print(" ".join(key)) | ||||
| 
 | ||||
|         unique_builds[key] = build | ||||
| 
 | ||||
|     builds = list(unique_builds.values()) | ||||
| 
 | ||||
|     if len(builds) == 0: | ||||
|         raise Exception( | ||||
|             f"No builds found for version [{version_info.version}] in [{search_url}]" | ||||
|         ) | ||||
| 
 | ||||
|     # Download builds. | ||||
|     worker.utils.remove_dir(builder.download_dir) | ||||
|     os.makedirs(builder.download_dir, exist_ok=True) | ||||
| 
 | ||||
|     for build in builds: | ||||
|         file_uri = build["url"] | ||||
|         file_name = build["file_name"] | ||||
| 
 | ||||
|         worker.utils.info(f"Pull [{file_name}]") | ||||
| 
 | ||||
|         download_file_path = builder.download_dir / file_name | ||||
| 
 | ||||
|         worker.utils.info(f"Download [{file_uri}]") | ||||
|         urllib.request.urlretrieve(file_uri, download_file_path) | ||||
|         # TODO: retry and resume | ||||
|         # -resume -timeout_sec timeout_in_seconds -retry_interval_sec retry_delay_in_seconds -maximum_retry_count retry_count | ||||
| 
 | ||||
|         # Moving to build_package folder | ||||
|         worker.utils.info(f"Move to [{builder.package_dir}]") | ||||
|         worker.utils.move( | ||||
|             download_file_path, builder.package_dir / download_file_path.name | ||||
|         ) | ||||
| 
 | ||||
|     worker.utils.remove_dir(builder.download_dir) | ||||
| 
 | ||||
|     # Write manifest of downloaded packages. | ||||
|     package_manifest = builder.package_dir / "manifest.json" | ||||
|     package_manifest.write_text(json.dumps(builds, indent=2)) | ||||
| 
 | ||||
| 
 | ||||
| def repackage(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     deployable_path = builder.package_dir / "deployable" | ||||
|     worker.utils.remove_dir(deployable_path) | ||||
|     os.makedirs(deployable_path, exist_ok=True) | ||||
|     os.chdir(deployable_path) | ||||
| 
 | ||||
|     package_manifest = builder.package_dir / "manifest.json" | ||||
|     builds = json.loads(package_manifest.read_text()) | ||||
| 
 | ||||
|     checksum_file_paths = [] | ||||
| 
 | ||||
|     # Rename the files and the internal folders for zip and tar.xz files | ||||
|     for build in builds: | ||||
|         file_name = build["file_name"] | ||||
|         file_path = builder.package_dir / file_name | ||||
| 
 | ||||
|         worker.utils.info(f"Repackaging {file_name}") | ||||
| 
 | ||||
|         if builder.service_env_id == "PROD" and build["risk_id"] != "stable": | ||||
|             raise Exception( | ||||
|                 "Can only repackage and deploy stable versions, found risk id '{build['risk_id']}'" | ||||
|             ) | ||||
| 
 | ||||
|         version = build["version"] | ||||
|         platform = build["platform"].replace("darwin", "macos") | ||||
|         architecture = build["architecture"].replace("86_", "").replace("amd", "x") | ||||
|         file_extension = build["file_extension"] | ||||
| 
 | ||||
|         current_folder_name = file_path.name[: -len("." + file_extension)] | ||||
|         new_folder_name = f"blender-{version}-{platform}-{architecture}" | ||||
|         new_file_name = f"{new_folder_name}.{file_extension}" | ||||
| 
 | ||||
|         source_file_path = file_path | ||||
|         dest_file_path = deployable_path / new_file_name | ||||
| 
 | ||||
|         worker.utils.info(f"Renaming file [{source_file_path}] to [{dest_file_path}]") | ||||
|         worker.utils.copy_file(source_file_path, dest_file_path) | ||||
| 
 | ||||
|         if file_extension == "zip": | ||||
|             worker.utils.info(f"Renaming internal folder to [{new_folder_name}]") | ||||
|             worker.utils.call( | ||||
|                 ["7z", "rn", dest_file_path, current_folder_name, new_folder_name] | ||||
|             ) | ||||
|         elif file_extension == "tar.xz": | ||||
|             worker.utils.info(f"Extracting [{source_file_path}] to [{dest_file_path}]") | ||||
|             worker.utils.call(["tar", "-xf", source_file_path, "--directory", "."]) | ||||
| 
 | ||||
|             worker.utils.remove_file(dest_file_path) | ||||
|             worker.utils.move( | ||||
|                 deployable_path / current_folder_name, deployable_path / new_folder_name | ||||
|             ) | ||||
| 
 | ||||
|             worker.utils.info(f"Compressing [{new_folder_name}] to [{dest_file_path}]") | ||||
|             cmd = [ | ||||
|                 "tar", | ||||
|                 "-cv", | ||||
|                 "--owner=0", | ||||
|                 "--group=0", | ||||
|                 "--use-compress-program", | ||||
|                 "xz -6", | ||||
|                 "-f", | ||||
|                 dest_file_path, | ||||
|                 new_folder_name, | ||||
|             ] | ||||
|             worker.utils.call(cmd) | ||||
|             worker.utils.remove_dir(deployable_path / new_folder_name) | ||||
| 
 | ||||
|         checksum_file_paths.append(dest_file_path) | ||||
| 
 | ||||
|     # Create checksums | ||||
|     worker.utils.info("Creating checksums") | ||||
|     os.chdir(deployable_path) | ||||
| 
 | ||||
|     for checksum in checksums: | ||||
|         checksum_text = "" | ||||
|         for filepath in checksum_file_paths: | ||||
|             checksum_line = worker.utils.check_output( | ||||
|                 [f"{checksum}sum", filepath.name] | ||||
|             ).strip() | ||||
|             checksum_text += checksum_line + "\n" | ||||
| 
 | ||||
|         print(checksum_text) | ||||
|         checksum_filepath = ( | ||||
|             deployable_path / f"blender-{version_info.version}.{checksum}" | ||||
|         ) | ||||
|         checksum_filepath.write_text(checksum_text) | ||||
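|         # Usage note: the files use the standard "<hash>  <name>" layout, | ||||
|         # so they can be verified with e.g. "sha256sum -c <file>.sha256". | ||||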
| 
 | ||||
| 
 | ||||
| def deploy(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     # No testable on UATEST currently. | ||||
|     dry_run = builder.service_env_id not in ("LOCAL", "PROD") | ||||
|     worker_config = builder.get_worker_config() | ||||
|     connect_id = f"{worker_config.download_user}@{worker_config.download_machine}" | ||||
| 
 | ||||
|     # Copy source | ||||
|     remote_dest_path = pathlib.Path(worker_config.download_source_folder) | ||||
|     change_modes = ["F0444"] | ||||
| 
 | ||||
|     if builder.service_env_id != "PROD": | ||||
|         # Already assumed to exist on production | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run | ||||
|         ) | ||||
| 
 | ||||
|     for source_path in builder.package_source_dir.iterdir(): | ||||
|         dest_path = f"{connect_id}:{remote_dest_path}/" | ||||
|         worker.utils.info(f"Deploying source package [{source_path}]") | ||||
|         worker.utils.rsync( | ||||
|             source_path, | ||||
|             dest_path, | ||||
|             change_modes=change_modes, | ||||
|             show_names=True, | ||||
|             dry_run=dry_run, | ||||
|         ) | ||||
| 
 | ||||
|     worker.utils.call_ssh( | ||||
|         connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run | ||||
|     ) | ||||
| 
 | ||||
|     # Copy binaries | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
|     major_minor_version = version_info.short_version | ||||
|     remote_dest_path = ( | ||||
|         pathlib.Path(worker_config.download_release_folder) | ||||
|         / f"Blender{major_minor_version}" | ||||
|     ) | ||||
|     deployable_path = builder.package_dir / "deployable" | ||||
|     change_modes = ["F0444"] | ||||
| 
 | ||||
|     worker.utils.call_ssh( | ||||
|         connect_id, ["mkdir", "-p", remote_dest_path], dry_run=dry_run | ||||
|     ) | ||||
|     worker.utils.call_ssh( | ||||
|         connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run | ||||
|     ) | ||||
| 
 | ||||
|     for source_path in deployable_path.iterdir(): | ||||
|         dest_path = f"{connect_id}:{remote_dest_path}/" | ||||
|         worker.utils.info(f"Deploying binary package [{source_path}]") | ||||
|         worker.utils.rsync( | ||||
|             source_path, | ||||
|             dest_path, | ||||
|             change_modes=change_modes, | ||||
|             show_names=True, | ||||
|             dry_run=dry_run, | ||||
|         ) | ||||
| 
 | ||||
|     worker.utils.call_ssh( | ||||
|         connect_id, ["ls", "-al", f"{remote_dest_path}/"], dry_run=dry_run | ||||
|     ) | ||||
							
								
								
									
116 config/worker/deploy/monitor.py Normal file
							|  | @ -0,0 +1,116 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import re | ||||
| import time | ||||
| import urllib.request | ||||
| 
 | ||||
| import worker.blender.version | ||||
| import worker.deploy.artifacts | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def monitor(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     wait_time_in_seconds = 120 | ||||
| 
 | ||||
|     start_time = time.time() | ||||
|     max_time_hours = 4.0 | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     required_base_url = "https://mirror.clarkson.edu/blender/release" | ||||
|     monitored_base_urls = [ | ||||
|         "https://download.blender.org/release", | ||||
|         "https://ftp.nluug.nl/pub/graphics/blender/release", | ||||
|         "https://ftp.halifax.rwth-aachen.de/blender/release", | ||||
|         "https://mirrors.dotsrc.org/blender/blender-release", | ||||
|         "https://mirrors.ocf.berkeley.edu/blender/release", | ||||
|         "https://mirrors.iu13.net/blender/release", | ||||
|         "https://mirrors.aliyun.com/blender/release", | ||||
|         "https://mirrors.sahilister.in/blender/release", | ||||
|         "https://mirror.freedif.org/blender/release", | ||||
|         required_base_url, | ||||
|     ] | ||||
| 
 | ||||
|     stop_on_required_site_found = False | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
|     expected_platforms = branches_config.code_official_platform_architectures[ | ||||
|         builder.track_id | ||||
|     ] | ||||
| 
 | ||||
|     expected_file_count = len(worker.deploy.artifacts.checksums) | ||||
|     for expected_platform in expected_platforms: | ||||
|         if expected_platform.startswith("windows"): | ||||
|             expected_file_count += 3  # msi, msix, zip | ||||
|         else: | ||||
|             expected_file_count += 1 | ||||
| 
 | ||||
|     folder_name = f"Blender{version_info.short_version}" | ||||
|     file_pattern = rf"[Bb]lender-{version_info.version}[\.\-\_a-zA-Z0-9]*" | ||||
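|     # Matches release file names such as "blender-4.2.1-linux-x64.tar.xz" | ||||
|     # (illustrative name, assuming the usual naming scheme). | ||||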
| 
 | ||||
|     while True: | ||||
|         found_site_count = 0 | ||||
|         print("=" * 80) | ||||
| 
 | ||||
|         # Assume no files are missing | ||||
|         sites_missing_files_count = 0 | ||||
| 
 | ||||
|         for base_url in monitored_base_urls: | ||||
|             search_url = f"{base_url}/{folder_name}" | ||||
|             print(f"Checking [{search_url}] for version [{version_info.version}]") | ||||
| 
 | ||||
|             # Header to avoid getting permission denied. | ||||
|             request = urllib.request.Request( | ||||
|                 search_url, headers={"User-Agent": "Mozilla"} | ||||
|             ) | ||||
| 
 | ||||
|             try: | ||||
|                 response = urllib.request.urlopen(request, timeout=5.0) | ||||
|                 text = response.read().decode("utf-8", "ignore") | ||||
|             except Exception as e: | ||||
|                 print(e) | ||||
|                 text = "" | ||||
| 
 | ||||
|             matches = set(re.findall(file_pattern, text)) | ||||
|             for match in matches: | ||||
|                 print(f"File [{match}]") | ||||
| 
 | ||||
|             if len(matches) == expected_file_count: | ||||
|                 found_site_count += 1 | ||||
|             elif len(matches) > 0: | ||||
|                 sites_missing_files_count += 1 | ||||
|             print("-" * 80) | ||||
| 
 | ||||
|             can_stop_monitoring = ( | ||||
|                 (len(matches) == expected_file_count) | ||||
|                 and (base_url == required_base_url) | ||||
|                 and (sites_missing_files_count == 0) | ||||
|             ) | ||||
| 
 | ||||
|             if stop_on_required_site_found and can_stop_monitoring: | ||||
|                 print(f"Required site found [{required_base_url}], stopping") | ||||
|                 return | ||||
| 
 | ||||
|         print("") | ||||
|         print("=" * 80) | ||||
|         print( | ||||
|             f"Sites [{found_site_count} of {len(monitored_base_urls)}] have all files" | ||||
|         ) | ||||
|         print("=" * 80) | ||||
| 
 | ||||
|         if found_site_count == len(monitored_base_urls): | ||||
|             break | ||||
| 
 | ||||
|         remaining_time_hours = max_time_hours - (time.time() - start_time) / 3600.0 | ||||
|         if remaining_time_hours < 0.0: | ||||
|             print("Waited for maximum amount of time, stopping") | ||||
|             break | ||||
| 
 | ||||
|         print( | ||||
|             f"Waiting {wait_time_in_seconds}s, total wait time remaining {remaining_time_hours:.2f}h" | ||||
|         ) | ||||
|         time.sleep(wait_time_in_seconds) | ||||
							
								
								
									
107 config/worker/deploy/pypi.py Normal file
							|  | @ -0,0 +1,107 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| import urllib.request | ||||
| import zipfile | ||||
| 
 | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.version | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def pull(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     worker.utils.info("Cleaning package and download directory") | ||||
|     worker.utils.remove_dir(builder.package_dir) | ||||
|     worker.utils.remove_dir(builder.download_dir) | ||||
|     os.makedirs(builder.package_dir, exist_ok=True) | ||||
|     os.makedirs(builder.download_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Fetch builds information. | ||||
|     env_base_url = { | ||||
|         "LOCAL": "https://builder.blender.org", | ||||
|         "UATEST": "https://builder.uatest.blender.org", | ||||
|         "PROD": "https://builder.blender.org", | ||||
|     } | ||||
|     base_url = env_base_url[builder.service_env_id] | ||||
| 
 | ||||
|     search_url = f"{base_url}/download/bpy/?format=json&v=1" | ||||
| 
 | ||||
|     worker.utils.info(f"Fetching build JSON from [{search_url}]") | ||||
| 
 | ||||
|     builds_response = urllib.request.urlopen(search_url) | ||||
|     builds_json = json.load(builds_response) | ||||
| 
 | ||||
|     # Get builds matching our version. | ||||
|     worker.utils.info("Processing build JSON") | ||||
| 
 | ||||
|     matching_builds = [] | ||||
|     for build in builds_json: | ||||
|         if build["version"] != version_info.version: | ||||
|             continue | ||||
|         if not build["file_name"].endswith(".zip"): | ||||
|             continue | ||||
|         worker.utils.info(f"Found {build['file_name']}") | ||||
|         if build["risk_id"] != "stable": | ||||
|             raise Exception("Can not only deploy stable releases") | ||||
|         matching_builds.append(build) | ||||
| 
 | ||||
|     # Check expected platforms | ||||
|     branches_config = builder.get_branches_config() | ||||
|     expected_platforms = branches_config.code_official_platform_architectures[ | ||||
|         builder.track_id | ||||
|     ] | ||||
|     if len(expected_platforms) != len(matching_builds): | ||||
|         platform_names = "\n".join(expected_platforms) | ||||
|         raise Exception("Unexpected number of builds, expected:\n" + platform_names) | ||||
| 
 | ||||
|     # Download builds. | ||||
|     for build in matching_builds: | ||||
|         file_uri = build["url"] | ||||
|         file_name = build["file_name"] | ||||
| 
 | ||||
|         worker.utils.info(f"Download [{file_uri}]") | ||||
|         download_file_path = builder.download_dir / file_name | ||||
|         urllib.request.urlretrieve(file_uri, download_file_path) | ||||
| 
 | ||||
|         # Unzip. | ||||
|         with zipfile.ZipFile(download_file_path, "r") as zipf: | ||||
|             zipf.extractall(path=builder.package_dir) | ||||
| 
 | ||||
|     worker.utils.remove_dir(builder.download_dir) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     dry_run = builder.service_env_id != "PROD" | ||||
|     wheels = list(builder.package_dir.glob("*.whl")) | ||||
| 
 | ||||
|     # Check expected platforms | ||||
|     branches_config = builder.get_branches_config() | ||||
|     expected_platforms = branches_config.code_official_platform_architectures[ | ||||
|         builder.track_id | ||||
|     ] | ||||
|     wheel_names = "\n".join([wheel.name for wheel in wheels]) | ||||
|     wheel_paths = [str(wheel) for wheel in wheels] | ||||
|     print(wheel_names) | ||||
|     if len(expected_platforms) != len(wheels): | ||||
|         raise Exception("Unexpected number of wheels:\n" + wheel_names) | ||||
| 
 | ||||
|     # Check wheels | ||||
|     cmd = ["twine", "check"] + wheel_paths | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
|     # Upload | ||||
|     worker_config = builder.get_worker_config() | ||||
|     env = os.environ.copy() | ||||
|     env["TWINE_USERNAME"] = "__token__" | ||||
|     env["TWINE_PASSWORD"] = worker_config.pypi_token(builder.service_env_id) | ||||
|     env["TWINE_REPOSITORY_URL"] = "https://upload.pypi.org/legacy/" | ||||
| 
 | ||||
|     cmd = ["twine", "upload", "--verbose", "--non-interactive"] + wheel_paths | ||||
|     worker.utils.call(cmd, env=env, dry_run=dry_run) | ||||
							
								
								
									
175 config/worker/deploy/snap.py Normal file
							|  | @ -0,0 +1,175 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| 
 | ||||
| import worker.blender.version | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     dry_run = False | ||||
|     if builder.service_env_id == "LOCAL" and not ( | ||||
|         builder.platform == "linux" and worker.utils.is_tool("snapcraft") | ||||
|     ): | ||||
|         worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|         dry_run = True | ||||
|     elif not builder.platform == "linux": | ||||
|         raise Exception("Can only run snapcraft on Linux, aborting") | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     needs_stable_grade = version_info.risk_id in ["candidate", "stable"] | ||||
|     grade = "stable" if needs_stable_grade else "devel" | ||||
| 
 | ||||
|     # Clean directory | ||||
|     for old_package_file in builder.store_snap_dir.glob("*.tar.xz"): | ||||
|         worker.utils.remove_file(old_package_file) | ||||
|     os.makedirs(builder.store_snap_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Get input package file path | ||||
|     package_manifest = builder.package_dir / "manifest.json" | ||||
|     builds = json.loads(package_manifest.read_text()) | ||||
|     linux_package_file_path = None | ||||
| 
 | ||||
|     for build in builds: | ||||
|         if build["platform"] == "linux" and build["file_extension"] == "tar.xz": | ||||
|             linux_package_file_path = builder.package_dir / build["file_name"] | ||||
|             break | ||||
|     if not linux_package_file_path: | ||||
|         raise Exception(f"Linux package not found in [{builder.package_dir}] manifest") | ||||
| 
 | ||||
|     source_file_path = linux_package_file_path | ||||
|     dest_file_path = builder.store_snap_dir / linux_package_file_path.name | ||||
|     worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") | ||||
|     worker.utils.copy_file(source_file_path, dest_file_path) | ||||
| 
 | ||||
|     freedesktop_path = builder.code_path / "release" / "freedesktop" | ||||
|     snap_source_root_path = freedesktop_path / "snap" | ||||
| 
 | ||||
|     blender_icon_file_name = "blender.svg" | ||||
|     snapcraft_template_file_path = ( | ||||
|         snap_source_root_path / "blender-snapcraft-template.yaml" | ||||
|     ) | ||||
| 
 | ||||
|     worker.utils.info(f"Using snap config file [{snapcraft_template_file_path}]") | ||||
|     snapcraft_text = snapcraft_template_file_path.read_text() | ||||
|     snapcraft_text = snapcraft_text.replace("@VERSION@", version_info.version) | ||||
|     snapcraft_text = snapcraft_text.replace("@GRADE@", grade) | ||||
|     snapcraft_text = snapcraft_text.replace( | ||||
|         "@ICON_PATH@", f"./{blender_icon_file_name}" | ||||
|     ) | ||||
|     snapcraft_text = snapcraft_text.replace( | ||||
|         "@PACKAGE_PATH@", f"./{linux_package_file_path.name}" | ||||
|     ) | ||||
| 
 | ||||
|     snapcraft_file_path = builder.store_snap_dir / "snapcraft.yaml" | ||||
|     worker.utils.info(f"Saving snapcraft config file [{snapcraft_file_path}]") | ||||
|     snapcraft_file_path.write_text(snapcraft_text) | ||||
|     print(snapcraft_text) | ||||
| 
 | ||||
|     snap_package_file_name = f"blender_{version_info.version}_amd64.snap" | ||||
|     snap_package_file_path = builder.store_snap_dir / snap_package_file_name | ||||
|     if snap_package_file_path.exists(): | ||||
|         worker.utils.info(f"Clearing snap file [{snap_package_file_path}]") | ||||
|         worker.utils.remove_file(snap_package_file_path) | ||||
| 
 | ||||
|     os.chdir(builder.store_snap_dir) | ||||
| 
 | ||||
|     # Copy all required files into working folder | ||||
|     source_file_path = ( | ||||
|         freedesktop_path / "icons" / "scalable" / "apps" / blender_icon_file_name | ||||
|     ) | ||||
|     dest_file_path = builder.store_snap_dir / "blender.svg" | ||||
|     worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") | ||||
|     worker.utils.copy_file(source_file_path, dest_file_path) | ||||
| 
 | ||||
|     source_file_path = snap_source_root_path / "blender-wrapper" | ||||
|     dest_file_path = builder.store_snap_dir / "blender-wrapper" | ||||
|     worker.utils.info(f"Copy file [{source_file_path}] -> [{dest_file_path}]") | ||||
|     worker.utils.copy_file(source_file_path, dest_file_path) | ||||
| 
 | ||||
|     worker.utils.call(["snapcraft", "clean", "--use-lxd"], dry_run=dry_run) | ||||
|     worker.utils.call(["snapcraft", "--use-lxd"], dry_run=dry_run) | ||||
|     worker.utils.call( | ||||
|         ["review-tools.snap-review", snap_package_file_path, "--allow-classic"], | ||||
|         dry_run=dry_run, | ||||
|     ) | ||||
| 
 | ||||
|     if dry_run: | ||||
|         snap_package_file_path.write_text("Dry run dummy package file") | ||||
| 
 | ||||
|     worker.utils.info("To test the snap package run this command") | ||||
|     print("sudo snap remove blender") | ||||
|     print(f"sudo snap install  --dangerous --classic {snap_package_file_path}") | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     dry_run = False | ||||
|     if builder.service_env_id == "LOCAL": | ||||
|         worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|         dry_run = True | ||||
|     elif not builder.platform == "linux": | ||||
|         raise Exception("Can only run snapcraft on Linux, aborting") | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
|     branches_config = builder.get_branches_config() | ||||
|     is_lts = builder.track_id in branches_config.all_lts_tracks | ||||
|     is_latest = ( | ||||
|         branches_config.track_major_minor_versions[builder.track_id] | ||||
|         == version_info.short_version | ||||
|     ) | ||||
| 
 | ||||
|     # Never push to stable | ||||
|     snap_risk_id = version_info.risk_id.replace("stable", "candidate").replace( | ||||
|         "alpha", "edge" | ||||
|     ) | ||||
|     if snap_risk_id == "stable": | ||||
|         raise Exception("Delivery to [stable] channel not allowed") | ||||
| 
 | ||||
|     snap_track_id = version_info.short_version | ||||
| 
 | ||||
|     if is_lts: | ||||
|         snap_track_id += "lts" | ||||
|         needs_release = True | ||||
|     elif is_latest: | ||||
|         # The latest/edge channel always corresponds to vdev. | ||||
|         snap_track_id = "latest" | ||||
|         needs_release = True | ||||
|     else: | ||||
|         # Push current release under development to beta or candidate | ||||
|         needs_release = True | ||||
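|     # Resulting channels look like "4.2lts/candidate" for LTS, "latest/edge" | ||||
|     # for main, or "4.3/beta" for a release under development (illustrative). | ||||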
| 
 | ||||
|     # worker.utils.call(["snapcraft", "list-tracks", "blender"], dry_run=dry_run) | ||||
|     snap_package_file_name = f"blender_{version_info.version}_amd64.snap" | ||||
|     snap_package_file_path = builder.store_snap_dir / snap_package_file_name | ||||
|     if not snap_package_file_path.exists(): | ||||
|         raise Exception(f"Snap file [{snap_package_file_path}] missing") | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
|     env = os.environ.copy() | ||||
|     env["SNAPCRAFT_STORE_CREDENTIALS"] = worker_config.snap_credentials( | ||||
|         builder.service_env_id | ||||
|     ) | ||||
| 
 | ||||
|     # If this fails, then the permissions were not set correctly with ACLs | ||||
|     worker.utils.call(["snapcraft", "status", "blender"], dry_run=dry_run, env=env) | ||||
| 
 | ||||
|     if needs_release: | ||||
|         # Upload and release. | ||||
|         snap_channel = f"{snap_track_id}/{snap_risk_id}" | ||||
|         cmd = ["snapcraft", "upload", "--release", snap_channel, snap_package_file_path] | ||||
|     else: | ||||
|         # Upload only. | ||||
|         snap_channel = "" | ||||
|         cmd = ["snapcraft", "upload", snap_package_file_path] | ||||
| 
 | ||||
|     # Some API call intermittently makes this fail; it seems status based, since we can still upload and set the channel | ||||
|     worker.utils.call(cmd, retry_count=5, retry_wait_time=120, dry_run=dry_run, env=env) | ||||
| 
 | ||||
|     if needs_release: | ||||
|         worker.utils.info("To test the snap package run this command") | ||||
|         print(f"sudo snap refresh blender --classic --channel {snap_channel}") | ||||
							
								
								
									
40 config/worker/deploy/source.py Normal file
							|  | @ -0,0 +1,40 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import os | ||||
| 
 | ||||
| import worker.blender.version | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def _package( | ||||
|     builder: worker.deploy.CodeDeployBuilder, needs_complete: bool = False | ||||
| ) -> None: | ||||
|     os.chdir(builder.code_path) | ||||
|     if needs_complete: | ||||
|         worker.utils.call(["make", "source_archive_complete"]) | ||||
|     else: | ||||
|         worker.utils.call(["make", "source_archive"]) | ||||
| 
 | ||||
|     # The make source archive scripts write to a different location since 2.83. | ||||
|     for source_file in builder.code_path.glob("blender-*.tar.xz*"): | ||||
|         worker.utils.move(source_file, builder.package_source_dir / source_file.name) | ||||
|     for source_file in builder.track_path.glob("blender-*.tar.xz*"): | ||||
|         worker.utils.move(source_file, builder.package_source_dir / source_file.name) | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: worker.deploy.CodeDeployBuilder) -> None: | ||||
|     print(f"Cleaning path [{builder.package_source_dir}]") | ||||
|     worker.utils.remove_dir(builder.package_source_dir) | ||||
|     os.makedirs(builder.package_source_dir, exist_ok=True) | ||||
| 
 | ||||
|     _package(builder, needs_complete=False) | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
|     if version_info.patch != 0: | ||||
|         worker.utils.info("Skipping complete source package for patch release") | ||||
|         return | ||||
| 
 | ||||
|     _package(builder, needs_complete=True) | ||||
							
								
								
									
271 config/worker/deploy/steam.py Normal file
							|  | @ -0,0 +1,271 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| import pathlib | ||||
| import time | ||||
| 
 | ||||
| import worker.blender.version | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def extract_file( | ||||
|     builder: worker.deploy.CodeStoreBuilder, | ||||
|     source_file_path: pathlib.Path, | ||||
|     platform: str, | ||||
| ) -> None: | ||||
|     worker.utils.info(f"Extracting artifact [{source_file_path}] for Steam") | ||||
|     if not source_file_path.exists(): | ||||
|         raise Exception("File not found, aborting") | ||||
| 
 | ||||
|     dest_extract_path = builder.store_steam_dir / platform | ||||
|     dest_content_path = dest_extract_path / "content" | ||||
|     worker.utils.remove_dir(dest_extract_path) | ||||
|     worker.utils.remove_dir(dest_content_path) | ||||
|     os.makedirs(dest_extract_path, exist_ok=True) | ||||
| 
 | ||||
|     if platform == "linux": | ||||
|         worker.utils.info(f"Extract [{source_file_path}] to [{dest_extract_path}]") | ||||
|         cmd = ["tar", "-xf", source_file_path, "--directory", dest_extract_path] | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
|         # Move any folder there as ./content | ||||
|         for source_content_path in dest_extract_path.iterdir(): | ||||
|             if source_content_path.is_dir(): | ||||
|                 worker.utils.info( | ||||
|                     f"Move [{source_content_path.name}] -> [{dest_content_path}]" | ||||
|                 ) | ||||
|                 worker.utils.move(source_content_path, dest_content_path) | ||||
|                 break | ||||
| 
 | ||||
|     elif platform == "darwin": | ||||
|         source_content_path = dest_extract_path / "Blender" | ||||
|         if source_content_path.exists(): | ||||
|             worker.utils.info(f"Removing [{source_content_path}]") | ||||
|             worker.utils.remove_dir(source_content_path) | ||||
| 
 | ||||
|         image_file_path = source_file_path.with_suffix(".img") | ||||
| 
 | ||||
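|         # dmg2img converts the Apple disk image to a raw image that 7z can | ||||
|         # extract without mounting it. | ||||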
|         cmd = ["dmg2img", "-v", "-i", source_file_path, "-o", image_file_path] | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
|         cmd = ["7z", "x", f"-o{dest_extract_path}", image_file_path] | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
|         os.makedirs(dest_content_path, exist_ok=True) | ||||
| 
 | ||||
|         worker.utils.remove_file(image_file_path) | ||||
| 
 | ||||
|         worker.utils.info( | ||||
|             f"Move Blender app from [{source_content_path}] -> [{dest_content_path}]" | ||||
|         ) | ||||
|         worker.utils.move( | ||||
|             source_content_path / "Blender.app", dest_content_path / "Blender.app" | ||||
|         ) | ||||
|         worker.utils.remove_dir(source_content_path) | ||||
|     elif platform == "windows": | ||||
|         worker.utils.info(f"Extracting zip file [{source_file_path}]") | ||||
|         cmd = ["7z", "x", f"-o{dest_extract_path}", source_file_path] | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
|         # Move any folder there as ./content | ||||
|         for source_content_path in dest_extract_path.iterdir(): | ||||
|             if source_content_path.is_dir(): | ||||
|                 worker.utils.info( | ||||
|                     f"Move [{source_content_path.name}] -> [{dest_content_path}]" | ||||
|                 ) | ||||
|                 worker.utils.move(source_content_path, dest_content_path) | ||||
|                 break | ||||
|     else: | ||||
|         raise Exception(f"Don't know how to extract for platform [{platform}]") | ||||
| 
 | ||||
| 
 | ||||
| def extract(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     package_manifest = builder.package_dir / "manifest.json" | ||||
|     builds = json.loads(package_manifest.read_text()) | ||||
| 
 | ||||
|     for build in builds: | ||||
|         if build["file_extension"] not in ["zip", "tar.xz", "dmg"]: | ||||
|             continue | ||||
|         if build["architecture"] == "arm64": | ||||
|             continue | ||||
| 
 | ||||
|         file_path = builder.package_dir / build["file_name"] | ||||
|         platform = build["platform"] | ||||
|         extract_file(builder, file_path, platform) | ||||
| 
 | ||||
| 
 | ||||
| def build(builder: worker.deploy.CodeStoreBuilder, is_preview: bool) -> None: | ||||
|     dry_run = False | ||||
|     if builder.service_env_id == "LOCAL": | ||||
|         worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|         dry_run = True | ||||
| 
 | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
|     branches_config = builder.get_branches_config() | ||||
|     is_lts = builder.track_id in branches_config.all_lts_tracks | ||||
|     is_latest = ( | ||||
|         branches_config.track_major_minor_versions["vdev"] == version_info.short_version | ||||
|     ) | ||||
| 
 | ||||
|     log_path = builder.track_path / "log" | ||||
|     worker.utils.remove_dir(log_path) | ||||
|     os.makedirs(log_path, exist_ok=True) | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
|     steam_credentials = worker_config.steam_credentials(builder.service_env_id) | ||||
|     steam_user_id, steam_user_password = steam_credentials | ||||
|     if not steam_user_id or not steam_user_password: | ||||
|         if not dry_run: | ||||
|             raise Exception("Steam user id or password not available, aborting") | ||||
| 
 | ||||
|     env = os.environ.copy() | ||||
|     env["PATH"] = env["PATH"] + os.pathsep + "/usr/games" | ||||
| 
 | ||||
|     cmd: worker.utils.CmdSequence = [ | ||||
|         "steamcmd", | ||||
|         "+login", | ||||
|         worker.utils.HiddenArgument(steam_user_id), | ||||
|         worker.utils.HiddenArgument(steam_user_password), | ||||
|         "+quit", | ||||
|     ] | ||||
|     worker.utils.call(cmd, dry_run=dry_run, env=env) | ||||
| 
 | ||||
|     worker.utils.info("Waiting 5 seconds for next steam command") | ||||
|     time.sleep(5.0) | ||||
| 
 | ||||
|     steam_app_id = worker_config.steam_app_id | ||||
|     steam_platform_depot_ids = worker_config.steam_platform_depot_ids | ||||
| 
 | ||||
|     for platform_id in ["linux", "darwin", "windows"]: | ||||
|         worker.utils.info(f"Platform {platform_id}") | ||||
| 
 | ||||
|         platform_depot_id = steam_platform_depot_ids[platform_id] | ||||
| 
 | ||||
|         track_build_root_path = builder.store_steam_dir / platform_id | ||||
|         if not track_build_root_path.exists(): | ||||
|             raise Exception(f"Folder {track_build_root_path} does not exist") | ||||
| 
 | ||||
|         platform_build_file_path = track_build_root_path / "depot_build.vdf" | ||||
| 
 | ||||
|         source_root_path = track_build_root_path / "content" | ||||
|         if not source_root_path.exists(): | ||||
|             raise Exception(f"Folder {source_root_path} does not exist") | ||||
| 
 | ||||
|         dest_root_path = track_build_root_path / "output" | ||||
| 
 | ||||
|         # Steam branch names cannot be upper case and no spaces are allowed. | ||||
|         # Branches are named "daily" and "devtest" on Steam, so rename those. | ||||
|         steam_branch_id = builder.service_env_id.lower() | ||||
|         steam_branch_id = steam_branch_id.replace("prod", "daily") | ||||
|         steam_branch_id = steam_branch_id.replace("uatest", "devtest") | ||||
| 
 | ||||
|         if is_lts: | ||||
|             # daily-X.X and devtest-X.X branches for LTS. | ||||
|             steam_branch_id = f"{steam_branch_id}-{version_info.short_version}" | ||||
|         elif is_latest: | ||||
|             # daily and devtest branches for main without suffix. | ||||
|             pass | ||||
|         else: | ||||
|             # Not setting this live. | ||||
|             steam_branch_id = "" | ||||
| 
 | ||||
|         preview = "1" if is_preview else "0" | ||||
| 
 | ||||
|         app_build_script = f""" | ||||
| "appbuild" | ||||
| {{ | ||||
| 	"appid"  "{steam_app_id}" | ||||
| 	"desc" "Blender {version_info.version}" // description for this build | ||||
| 	"buildoutput" "{dest_root_path}" // build output folder for .log, .csm & .csd files, relative to location of this file | ||||
| 	"contentroot" "{source_root_path}" // root content folder, relative to location of this file | ||||
| 	"setlive"  "{steam_branch_id}" // branch to set live after successful build, non if empty | ||||
| 	"preview" "{preview}" // 1 to enable preview builds, 0 to commit build to steampipe | ||||
| 	"local"  ""  // set to file path of local content server | ||||
| 
 | ||||
| 	"depots" | ||||
| 	{{ | ||||
| 		"{platform_depot_id}" "{platform_build_file_path}" | ||||
| 	}} | ||||
| }} | ||||
| """ | ||||
| 
 | ||||
|         platform_build_script = f""" | ||||
| "DepotBuildConfig" | ||||
| {{ | ||||
| 	// Set your assigned depot ID here | ||||
| 	"DepotID" "{platform_depot_id}" | ||||
| 
 | ||||
| 	// Set a root for all content. | ||||
| 	// All relative paths specified below (LocalPath in FileMapping entries, and FileExclusion paths) | ||||
| 	// will be resolved relative to this root. | ||||
| 	// If you don't define ContentRoot, then it will be assumed to be | ||||
| 	// the location of this script file, which probably isn't what you want | ||||
| 	"ContentRoot"  "{source_root_path}" | ||||
| 
 | ||||
| 	// include all files recursively | ||||
| 	"FileMapping" | ||||
| 	{{ | ||||
| 	// This can be a full path, or a path relative to ContentRoot | ||||
| 	"LocalPath" "*" | ||||
| 
 | ||||
| 	// This is a path relative to the install folder of your game | ||||
| 	"DepotPath" "." | ||||
| 
 | ||||
| 	// If LocalPath contains wildcards, setting this means that all | ||||
| 	// matching files within subdirectories of LocalPath will also | ||||
| 	// be included. | ||||
| 	"recursive" "1" | ||||
| 	}} | ||||
| 
 | ||||
| 	// but exclude all symbol files | ||||
| 	// This can be a full path, or a path relative to ContentRoot | ||||
| 	//"FileExclusion" "*.pdb" | ||||
| }} | ||||
| """ | ||||
| 
 | ||||
|         (track_build_root_path / "app_build.vdf").write_text(app_build_script) | ||||
|         platform_build_file_path.write_text(platform_build_script) | ||||
| 
 | ||||
|         worker.utils.info( | ||||
|             f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] for steam branch [{steam_branch_id}], building" | ||||
|         ) | ||||
| 
 | ||||
|         cmd = [ | ||||
|             "steamcmd", | ||||
|             "+login", | ||||
|             worker.utils.HiddenArgument(steam_user_id), | ||||
|             worker.utils.HiddenArgument(steam_user_password), | ||||
|             "+run_app_build", | ||||
|             track_build_root_path / "app_build.vdf", | ||||
|             "+quit", | ||||
|         ] | ||||
|         retry_count = 0 if is_preview else 3 | ||||
| 
 | ||||
|         worker.utils.call( | ||||
|             cmd, retry_count=retry_count, retry_wait_time=120, dry_run=dry_run, env=env | ||||
|         ) | ||||
| 
 | ||||
|         worker.utils.info("Waiting 5 seconds for next steam command") | ||||
|         time.sleep(5.0) | ||||
| 
 | ||||
|         worker.utils.info( | ||||
|             f"Version [{version_info.version}] for [{platform_id}] in preview [{is_preview}] is done, success" | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     worker.utils.remove_dir(builder.store_steam_dir) | ||||
|     os.makedirs(builder.store_steam_dir, exist_ok=True) | ||||
| 
 | ||||
|     # Extract and prepare content | ||||
|     extract(builder) | ||||
|     build(builder, is_preview=True) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     # This will push to the store | ||||
|     build(builder, is_preview=False) | ||||
							
								
								
									
122 config/worker/deploy/windows.py Normal file
							|  | @ -0,0 +1,122 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import json | ||||
| import os | ||||
| 
 | ||||
| import worker.blender.pack | ||||
| import worker.blender.sign | ||||
| import worker.blender.version | ||||
| import worker.blender.msix_package | ||||
| 
 | ||||
| import worker.deploy | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| def _package_architecture( | ||||
|     builder: worker.deploy.CodeStoreBuilder, architecture: str, dry_run: bool | ||||
| ) -> None: | ||||
|     version_info = worker.blender.version.VersionInfo(builder) | ||||
| 
 | ||||
|     # Revision with MS Store must be set to 0 | ||||
|     revision_id = 0 | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
|     is_lts = builder.track_id in branches_config.windows_store_lts_tracks | ||||
|     base_build_number = 0 | ||||
| 
 | ||||
|     build_number = version_info.patch + base_build_number | ||||
|     worker.utils.info(f"Builder number {build_number}") | ||||
| 
 | ||||
|     store_version_id = f"{version_info.short_version}.{build_number}.{revision_id}" | ||||
|     worker.utils.info(f"Store version ID {store_version_id}") | ||||
| 
 | ||||
|     worker.utils.info(f"Cleaning path [{builder.store_windows_dir}]") | ||||
|     worker.utils.remove_dir(builder.store_windows_dir) | ||||
|     os.makedirs(builder.store_windows_dir, exist_ok=True) | ||||
| 
 | ||||
|     os.chdir(builder.store_windows_dir) | ||||
| 
 | ||||
|     # Find input zip package. | ||||
|     package_manifest = builder.package_dir / "manifest.json" | ||||
|     builds = json.loads(package_manifest.read_text()) | ||||
|     input_file_path = None | ||||
| 
 | ||||
|     for build in builds: | ||||
|         if ( | ||||
|             build["platform"] == "windows" | ||||
|             and build["file_extension"] == "zip" | ||||
|             and build["architecture"] == architecture | ||||
|         ): | ||||
|             input_file_path = builder.package_dir / build["file_name"] | ||||
|             break | ||||
|     if not input_file_path: | ||||
|         raise Exception( | ||||
|             f"Windows package not found in [{builder.package_dir}] manifest" | ||||
|         ) | ||||
| 
 | ||||
|     # Copy all required files into working folder | ||||
|     source_path = builder.code_path / "release" / "windows" / "msix" | ||||
|     dest_path = builder.store_windows_dir | ||||
|     worker.utils.info( | ||||
|         f"Copying [{source_path}] -> [{dest_path}] for windows store packaging" | ||||
|     ) | ||||
| 
 | ||||
|     for source_file in source_path.iterdir(): | ||||
|         if source_file.name == "README.md": | ||||
|             continue | ||||
|         if source_file.is_dir(): | ||||
|             worker.utils.copy_dir(source_file, dest_path / source_file.name) | ||||
|         else: | ||||
|             worker.utils.copy_file(source_file, dest_path / source_file.name) | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
| 
 | ||||
|     cert_subject = worker_config.windows_store_certificate(builder.service_env_id) | ||||
|     certificate_id = f"CN={cert_subject}" | ||||
| 
 | ||||
|     msix_filepath = worker.blender.msix_package.pack( | ||||
|         store_version_id, input_file_path, certificate_id, lts=is_lts, dry_run=dry_run | ||||
|     ) | ||||
| 
 | ||||
|     if worker_config.windows_store_self_sign: | ||||
|         worker.blender.sign.sign_windows_files( | ||||
|             builder.service_env_id, [msix_filepath], certificate_id=certificate_id | ||||
|         ) | ||||
| 
 | ||||
|     if dry_run: | ||||
|         msix_filepath.write_text("Dry run dummy package file") | ||||
| 
 | ||||
|     # Clear out all msix files first | ||||
|     for old_msix_filepath in builder.package_dir.glob("*.msix"): | ||||
|         worker.utils.remove_file(old_msix_filepath) | ||||
| 
 | ||||
|     dest_path = builder.package_dir / msix_filepath.name | ||||
|     worker.utils.info(f"Copying [{msix_filepath}] -> [{dest_path}] for distribution") | ||||
|     worker.utils.copy_file(msix_filepath, dest_path) | ||||
|     worker.blender.pack.generate_file_hash(dest_path) | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     dry_run = False | ||||
|     if not builder.platform == "windows": | ||||
|         if builder.service_env_id == "LOCAL": | ||||
|             worker.utils.warning("Performing dry run on LOCAL service environment") | ||||
|             dry_run = True | ||||
|         else: | ||||
|             raise Exception("Can only run this on Windows, aborting") | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
|     expected_platforms = branches_config.code_official_platform_architectures[ | ||||
|         builder.track_id | ||||
|     ] | ||||
| 
 | ||||
|     for expected_platform in expected_platforms: | ||||
|         if expected_platform.startswith("windows"): | ||||
|             architecture = expected_platform.split("-")[1] | ||||
|             _package_architecture(builder, architecture, dry_run) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.deploy.CodeStoreBuilder) -> None: | ||||
|     worker.utils.info("Windows store delivery not implemented") | ||||
							
								
								
									
247 config/worker/doc_api.py Executable file
							|  | @ -0,0 +1,247 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| import worker.blender | ||||
| import worker.blender.compile | ||||
| import worker.blender.update | ||||
| import worker.blender.version | ||||
| 
 | ||||
| 
 | ||||
| class DocApiBuilder(worker.blender.CodeBuilder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args) | ||||
|         self.needs_package_delivery = args.needs_package_delivery | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
| 
 | ||||
| def download_api_dump_test_data(local_delivery_path: pathlib.Path) -> None: | ||||
|     import urllib.request | ||||
|     import json | ||||
| 
 | ||||
|     api_base_url = "https://docs.blender.org/api" | ||||
|     api_dump_index_url = f"{api_base_url}/api_dump_index.json" | ||||
| 
 | ||||
|     request = urllib.request.Request( | ||||
|         api_dump_index_url, headers={"User-Agent": "Mozilla"} | ||||
|     ) | ||||
|     response = urllib.request.urlopen(request, timeout=5.0) | ||||
| 
 | ||||
|     api_dump_index_text = response.read().decode("utf-8", "ignore") | ||||
|     api_dump_index_path = local_delivery_path / "api_dump_index.json" | ||||
|     os.makedirs(api_dump_index_path.parent, exist_ok=True) | ||||
|     api_dump_index_path.write_text(api_dump_index_text) | ||||
| 
 | ||||
|     api_dump_index = json.loads(api_dump_index_text) | ||||
|     for version in api_dump_index.keys(): | ||||
|         api_dump_url = f"{api_base_url}/{version}/api_dump.json" | ||||
|         worker.utils.info(f"Download {api_dump_url}") | ||||
| 
 | ||||
|         request = urllib.request.Request( | ||||
|             api_dump_url, headers={"User-Agent": "Mozilla"} | ||||
|         ) | ||||
|         response = urllib.request.urlopen(request, timeout=5.0) | ||||
| 
 | ||||
|         api_dump_text = response.read().decode("utf-8", "ignore") | ||||
|         api_dump_path = local_delivery_path / version / "api_dump.json" | ||||
|         os.makedirs(api_dump_path.parent, exist_ok=True) | ||||
|         api_dump_path.write_text(api_dump_text) | ||||
| 
 | ||||
| 
 | ||||
| def compile_doc(builder: DocApiBuilder) -> None: | ||||
|     # Install requirements | ||||
|     os.chdir(builder.track_path) | ||||
|     doc_api_script_path = builder.code_path / "doc" / "python_api" | ||||
|     worker.utils.call_pipenv( | ||||
|         ["install", "--requirements", doc_api_script_path / "requirements.txt"] | ||||
|     ) | ||||
| 
 | ||||
|     # Clean build directory | ||||
|     worker.utils.remove_dir(builder.build_doc_path) | ||||
|     os.makedirs(builder.build_doc_path, exist_ok=True) | ||||
| 
 | ||||
|     os.chdir(doc_api_script_path) | ||||
| 
 | ||||
|     # Get API dumps data from server. | ||||
|     api_dump_build_path = builder.build_doc_path / "api_dump" | ||||
|     os.makedirs(api_dump_build_path, exist_ok=True) | ||||
| 
 | ||||
|     api_dump_include_paths = ["api_dump_index.json", "*/", "api_dump.json"] | ||||
|     api_dump_build_path_index = api_dump_build_path / "api_dump_index.json" | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
|     connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" | ||||
|     remote_path = ( | ||||
|         pathlib.Path(worker_config.docs_folder) | ||||
|         / "docs.blender.org" | ||||
|         / "htdocs" | ||||
|         / builder.service_env_id | ||||
|         / "api" | ||||
|     ) | ||||
| 
 | ||||
|     # Get data from docs.blender.org for local testing. | ||||
|     if builder.service_env_id == "LOCAL": | ||||
|         worker.utils.info("Downloading API dump data from docs.blender.org for testing") | ||||
|         download_api_dump_test_data(remote_path) | ||||
| 
 | ||||
|     source_path = f"{connect_id}:{remote_path}/" | ||||
|     dest_path = api_dump_build_path | ||||
| 
 | ||||
|     worker.utils.rsync( | ||||
|         source_path, | ||||
|         dest_path, | ||||
|         include_paths=api_dump_include_paths, | ||||
|         exclude_paths=["*"], | ||||
|     ) | ||||
| 
 | ||||
|     version = worker.blender.version.VersionInfo(builder).short_version | ||||
|     api_dump_build_path_current_version = api_dump_build_path / version | ||||
|     os.makedirs(api_dump_build_path_current_version, exist_ok=True) | ||||
| 
 | ||||
|     # Generate API docs | ||||
|     cmd = [ | ||||
|         builder.blender_command_path(), | ||||
|         "--background", | ||||
|         "--factory-startup", | ||||
|         "-noaudio", | ||||
|         "--python", | ||||
|         doc_api_script_path / "sphinx_doc_gen.py", | ||||
|         "--", | ||||
|         "--output", | ||||
|         builder.build_doc_path, | ||||
|         "--api-changelog-generate", | ||||
|         "--api-dump-index-path", | ||||
|         api_dump_build_path_index, | ||||
|     ] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
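|     # Size sphinx-build parallelism to available memory; the helper budgets | ||||
|     # roughly 1.25 GB per thread. | ||||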
|     num_threads = worker.configure.get_thread_count(thread_memory_in_GB=1.25) | ||||
| 
 | ||||
|     in_path = builder.build_doc_path / "sphinx-in" | ||||
|     out_path = builder.build_doc_path / "sphinx-out-html" | ||||
|     worker.utils.call( | ||||
|         ["sphinx-build", "-b", "html", "-j", str(num_threads), in_path, out_path] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: DocApiBuilder) -> None: | ||||
|     os.chdir(builder.build_doc_path) | ||||
| 
 | ||||
|     version = worker.blender.version.VersionInfo(builder).short_version | ||||
|     version_file_label = version.replace(".", "_") | ||||
| 
 | ||||
|     package_name = f"blender_python_reference_{version_file_label}" | ||||
|     package_file_name = f"{package_name}.zip" | ||||
| 
 | ||||
|     cmd = ["7z", "a", "-tzip", package_file_name, "./sphinx-out-html", "-r"] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
|     cmd = ["7z", "rn", package_file_name, "sphinx-out-html", package_name] | ||||
|     worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: DocApiBuilder) -> None: | ||||
|     # Get versions | ||||
|     branches_config = builder.get_branches_config() | ||||
|     version = worker.blender.version.VersionInfo(builder).short_version | ||||
|     dev_version = branches_config.track_major_minor_versions["vdev"] | ||||
|     latest_version = branches_config.doc_stable_major_minor_version | ||||
| 
 | ||||
|     # Get remote path | ||||
|     worker_config = builder.get_worker_config() | ||||
|     connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" | ||||
|     remote_path = ( | ||||
|         pathlib.Path(worker_config.docs_folder) | ||||
|         / "docs.blender.org" | ||||
|         / "htdocs" | ||||
|         / builder.service_env_id | ||||
|         / "api" | ||||
|     ) | ||||
| 
 | ||||
|     version_remote_path = remote_path / version | ||||
|     worker.utils.call_ssh(connect_id, ["mkdir", "-p", version_remote_path]) | ||||
| 
 | ||||
|     change_modes = ["D0755", "F0644"] | ||||
| 
 | ||||
|     # Sync HTML files | ||||
|     source_path = f"{builder.build_doc_path}/sphinx-out-html/" | ||||
|     dest_path = f"{connect_id}:{version_remote_path}/" | ||||
|     worker.utils.rsync( | ||||
|         source_path, dest_path, exclude_paths=[".doctrees"], change_modes=change_modes | ||||
|     ) | ||||
| 
 | ||||
|     # Put API dumps data on the server. | ||||
|     api_dump_build_path = f"{builder.build_doc_path}/api_dump/" | ||||
|     api_dump_dest_path = f"{connect_id}:{remote_path}/" | ||||
|     worker.utils.rsync( | ||||
|         api_dump_build_path, api_dump_dest_path, change_modes=change_modes | ||||
|     ) | ||||
| 
 | ||||
|     # Sync zip package | ||||
|     if builder.needs_package_delivery: | ||||
|         version_file_label = version.replace(".", "_") | ||||
| 
 | ||||
|         package_name = f"blender_python_reference_{version_file_label}" | ||||
|         package_file_name = f"{package_name}.zip" | ||||
| 
 | ||||
|         source_file_path = builder.build_doc_path / package_file_name | ||||
|         dest_file_path = f"{connect_id}:{version_remote_path}/{package_file_name}" | ||||
|         worker.utils.rsync( | ||||
|             source_file_path, | ||||
|             dest_file_path, | ||||
|             exclude_paths=[".doctrees"], | ||||
|             change_modes=change_modes, | ||||
|         ) | ||||
| 
 | ||||
|     # Create links | ||||
|     if builder.track_id == "vdev": | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "dev"] | ||||
|         ) | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, | ||||
|             ["ln", "-svF", remote_path / dev_version, remote_path / "master"], | ||||
|         ) | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, ["ln", "-svF", remote_path / dev_version, remote_path / "main"] | ||||
|         ) | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, | ||||
|             ["ln", "-svF", remote_path / latest_version, remote_path / "latest"], | ||||
|         ) | ||||
|         worker.utils.call_ssh( | ||||
|             connect_id, | ||||
|             ["ln", "-svF", remote_path / latest_version, remote_path / "current"], | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update-code"] = worker.blender.update.update | ||||
|     steps["compile-code"] = worker.blender.compile.compile_code | ||||
|     steps["compile-install"] = worker.blender.compile.compile_install | ||||
|     steps["compile"] = compile_doc | ||||
|     steps["package"] = package | ||||
|     steps["deliver"] = deliver | ||||
|     steps["clean"] = worker.blender.CodeBuilder.clean | ||||
| 
 | ||||
|     parser = worker.blender.create_argument_parser(steps=steps) | ||||
|     parser.add_argument("--needs-package-delivery", action="store_true", required=False) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = DocApiBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
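| 
| # Example (sketch) of a local run, assuming this file is config/worker/doc_api.py: | ||||
| #   ./doc_api.py --service-env-id=LOCAL --track-id=vdev all | ||||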
83  config/worker/doc_developer.py  (Executable file)
							|  | @ -0,0 +1,83 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| class DocDeveloperBuilder(worker.utils.Builder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args, "blender", "blender-developer-docs") | ||||
|         self.build_path = self.track_path / "build_developer_docs" | ||||
|         self.output_path = self.build_path / "html" | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
| 
 | ||||
| def update(builder: DocDeveloperBuilder) -> None: | ||||
|     builder.update_source() | ||||
| 
 | ||||
| 
 | ||||
| def compile_doc(builder: DocDeveloperBuilder) -> None: | ||||
|     os.chdir(builder.track_path) | ||||
|     worker.utils.call_pipenv( | ||||
|         ["install", "--requirements", builder.code_path / "requirements.txt"] | ||||
|     ) | ||||
| 
 | ||||
|     worker.utils.remove_dir(builder.output_path) | ||||
| 
 | ||||
|     os.makedirs(builder.build_path, exist_ok=True) | ||||
|     os.chdir(builder.build_path) | ||||
| 
 | ||||
|     mkdocs_yml_path = builder.code_path / "mkdocs.yml" | ||||
|     worker.utils.call_pipenv( | ||||
|         ["run", "mkdocs", "build", "-f", mkdocs_yml_path, "-d", builder.output_path] | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: DocDeveloperBuilder) -> None: | ||||
|     worker_config = builder.get_worker_config() | ||||
| 
 | ||||
|     remote_path = f"developer.blender.org/webroot/{builder.service_env_id}/docs" | ||||
| 
 | ||||
|     connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" | ||||
|     server_docs_path = pathlib.Path(worker_config.docs_folder) / pathlib.Path( | ||||
|         remote_path | ||||
|     ) | ||||
| 
 | ||||
|     change_modes = ["D0755", "F0644"] | ||||
|     source_path = f"{builder.output_path}/" | ||||
|     dest_path = f"{connect_id}:{server_docs_path}/" | ||||
| 
 | ||||
|     worker.utils.call_ssh(connect_id, ["mkdir", "-p", server_docs_path]) | ||||
|     worker.utils.rsync( | ||||
|         source_path, | ||||
|         dest_path, | ||||
|         change_modes=change_modes, | ||||
|         port=worker_config.docs_port, | ||||
|         delete=True, | ||||
|         delete_path_check=f"/developer.blender.org/webroot/{builder.service_env_id}/docs", | ||||
|     ) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["update"] = update | ||||
|     steps["compile"] = compile_doc | ||||
|     steps["deliver"] = deliver | ||||
| 
 | ||||
|     parser = worker.utils.create_argument_parser(steps=steps) | ||||
|     parser.add_argument("--needs-package-delivery", action="store_true", required=False) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = DocDeveloperBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
310  config/worker/doc_manual.py  (Executable file)
							|  | @ -0,0 +1,310 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import json | ||||
| import os | ||||
| import pathlib | ||||
| import re | ||||
| import sys | ||||
| import time | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| from datetime import timedelta | ||||
| from typing import Optional, Sequence | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.configure | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| class ManualBuilder(worker.utils.Builder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args, "blender", "blender-manual") | ||||
|         self.needs_all_locales = args.needs_all_locales | ||||
|         self.needs_package_delivery = args.needs_package_delivery | ||||
|         self.doc_format = args.doc_format | ||||
|         self.build_path = self.track_path / "build" | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
|     def get_locales(self) -> Sequence[str]: | ||||
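|         """Return "en", plus every locale directory from the translations | ||||
|         checkout (code_path/locale) when needs_all_locales is set.""" | ||||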
|         locales = ["en"] | ||||
|         if self.needs_all_locales: | ||||
|             locale_path = self.code_path / "locale" | ||||
|             locales += [ | ||||
|                 item.name | ||||
|                 for item in locale_path.iterdir() | ||||
|                 if not item.name.startswith(".") | ||||
|             ] | ||||
|         return locales | ||||
| 
 | ||||
| 
 | ||||
| def update(builder: ManualBuilder) -> None: | ||||
|     builder.update_source() | ||||
|     if builder.needs_all_locales: | ||||
|         worker.utils.update_source( | ||||
|             "blender", "blender-manual-translations", builder.code_path / "locale" | ||||
|         ) | ||||
| 
 | ||||
| 
 | ||||
| def check(builder: ManualBuilder) -> None: | ||||
|     os.chdir(builder.track_path) | ||||
|     worker.utils.call_pipenv( | ||||
|         ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"] | ||||
|     ) | ||||
| 
 | ||||
|     os.chdir(builder.code_path) | ||||
| 
 | ||||
|     make_cmd = "make.bat" if builder.platform == "windows" else "make" | ||||
|     worker.utils.call_pipenv(["run", make_cmd, "check_structure"]) | ||||
|     # worker.utils.call_pipenv(["run", make_cmd, "check_syntax"]) | ||||
|     # worker.utils.call_pipenv(["run", make_cmd, "check_spelling"]) | ||||
| 
 | ||||
| 
 | ||||
| def compile_doc(builder: ManualBuilder) -> None: | ||||
|     # Install requirements. | ||||
|     os.chdir(builder.track_path) | ||||
|     worker.utils.call_pipenv( | ||||
|         ["install", "--pre", "--requirements", builder.code_path / "requirements.txt"] | ||||
|     ) | ||||
| 
 | ||||
|     # Determine format and locales | ||||
|     locales = builder.get_locales() | ||||
|     doc_format = builder.doc_format | ||||
| 
 | ||||
|     # Clean build folder | ||||
|     worker.utils.remove_dir(builder.build_path) | ||||
|     os.makedirs(builder.build_path, exist_ok=True) | ||||
|     os.chdir(builder.code_path) | ||||
| 
 | ||||
|     branches_config = builder.get_branches_config() | ||||
| 
 | ||||
|     # Check manual version matches track. | ||||
|     conf_file_path = builder.code_path / "manual" / "conf.py" | ||||
|     conf_text = conf_file_path.read_text() | ||||
|     match = re.search(r"blender_version\s*=\s*['\"](.*)['\"]", conf_text) | ||||
|     expected_version = branches_config.track_major_minor_versions[builder.track_id] | ||||
|     found_version = match.group(1) if match else "nothing" | ||||
|     if found_version != expected_version: | ||||
|         raise Exception( | ||||
|             f"Expected blender_version {expected_version}, but found {found_version} in manual/conf.py" | ||||
|         ) | ||||
| 
 | ||||
|     def filter_output(line: str) -> Optional[str]: | ||||
|         if line.find("WARNING: unknown mimetype for .doctrees") != -1: | ||||
|             return None | ||||
|         elif line.find("copying images...") != -1: | ||||
|             return None | ||||
|         return line | ||||
| 
 | ||||
|     # Generate manual | ||||
|     for locale in locales: | ||||
|         start_timestamp = time.time() | ||||
|         worker.utils.info(f"Generating {locale} in {doc_format}") | ||||
| 
 | ||||
|         num_threads = worker.configure.get_thread_count(thread_memory_in_GB=1.25) | ||||
| 
 | ||||
|         os.chdir(builder.code_path) | ||||
|         build_output_path = builder.build_path / doc_format / locale | ||||
| 
 | ||||
|         worker.utils.call_pipenv( | ||||
|             [ | ||||
|                 "run", | ||||
|                 "sphinx-build", | ||||
|                 "-b", | ||||
|                 doc_format, | ||||
|                 "-j", | ||||
|                 str(num_threads), | ||||
|                 "-D", | ||||
|                 f"language={locale}", | ||||
|                 "./manual", | ||||
|                 build_output_path, | ||||
|             ], | ||||
|             filter_output=filter_output, | ||||
|         ) | ||||
| 
 | ||||
|         if doc_format == "epub": | ||||
|             if not any(build_output_path.rglob("*.epub")): | ||||
|                 raise Exception(f"Expected epub files missing in {build_output_path}") | ||||
| 
 | ||||
|         # Hack appropriate versions.json URL into version_switch.js | ||||
|         worker.utils.info("Replacing URL in version_switch.js") | ||||
| 
 | ||||
|         version_switch_file_path = ( | ||||
|             build_output_path / "_static" / "js" / "version_switch.js" | ||||
|         ) | ||||
|         versions_file_url = ( | ||||
|             f"https://docs.blender.org/{builder.service_env_id}/versions.json" | ||||
|         ) | ||||
| 
 | ||||
|         version_switch_text = version_switch_file_path.read_text() | ||||
|         version_switch_text = version_switch_text.replace( | ||||
|             "https://docs.blender.org/versions.json", versions_file_url | ||||
|         ) | ||||
|         version_switch_text = version_switch_text.replace( | ||||
|             "https://docs.blender.org/PROD/versions.json", versions_file_url | ||||
|         ) | ||||
|         version_switch_text = version_switch_text.replace( | ||||
|             "https://docs.blender.org/UATEST/versions.json", versions_file_url | ||||
|         ) | ||||
|         version_switch_file_path.write_text(version_switch_text) | ||||
| 
 | ||||
|         time_total = time.time() - start_timestamp | ||||
|         time_delta = str(timedelta(seconds=time_total)) | ||||
|         worker.utils.info(f"Generated {locale} in {doc_format} in {time_delta}") | ||||
| 
 | ||||
| 
 | ||||
| def package(builder: ManualBuilder) -> None: | ||||
|     if not builder.needs_package_delivery: | ||||
|         worker.utils.info("No package delivery needed, skipping packaging") | ||||
|         return | ||||
| 
 | ||||
|     locales = builder.get_locales() | ||||
|     doc_format = builder.doc_format | ||||
| 
 | ||||
|     os.chdir(builder.build_path) | ||||
| 
 | ||||
|     compression_option = ""  # "-mx=9" | ||||
|     package_file_name = f"blender_manual_{doc_format}.zip" | ||||
| 
 | ||||
|     build_package_path = builder.build_path / "package" | ||||
| 
 | ||||
|     for locale in locales: | ||||
|         package_file_path = build_package_path / locale / package_file_name | ||||
|         worker.utils.remove_file(package_file_path) | ||||
| 
 | ||||
|         source_path = f"{doc_format}/{locale}" | ||||
| 
 | ||||
|         cmd = [ | ||||
|             "7z", | ||||
|             "a", | ||||
|             "-tzip", | ||||
|             package_file_path, | ||||
|             source_path, | ||||
|             "-r", | ||||
|             "-xr!.doctrees", | ||||
|         ] | ||||
|         # Only append the compression option when set, to avoid passing | ||||
|         # 7z an empty argument. | ||||
|         if compression_option: | ||||
|             cmd.append(compression_option) | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
|         cmd = [ | ||||
|             "7z", | ||||
|             "rn", | ||||
|             package_file_path, | ||||
|             source_path, | ||||
|             f"blender_manual_{builder.track_id}_{locale}.{doc_format}", | ||||
|         ] | ||||
|         worker.utils.call(cmd) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: ManualBuilder) -> None: | ||||
|     locales = builder.get_locales() | ||||
|     doc_format = builder.doc_format | ||||
| 
 | ||||
|     # Get versions | ||||
|     branches_config = builder.get_branches_config() | ||||
|     version = branches_config.track_major_minor_versions[builder.track_id] | ||||
|     dev_version = branches_config.track_major_minor_versions["vdev"] | ||||
|     latest_version = branches_config.doc_stable_major_minor_version | ||||
| 
 | ||||
|     # Get remote paths | ||||
|     worker_config = builder.get_worker_config() | ||||
|     connect_id = f"{worker_config.docs_user}@{worker_config.docs_machine}" | ||||
|     docs_remote_path = ( | ||||
|         pathlib.Path(worker_config.docs_folder) | ||||
|         / "docs.blender.org" | ||||
|         / "htdocs" | ||||
|         / builder.service_env_id | ||||
|     ) | ||||
| 
 | ||||
|     # Sync each locale | ||||
|     for locale in locales: | ||||
|         worker.utils.info(f"Syncing {locale}") | ||||
| 
 | ||||
|         # Create directory | ||||
|         remote_path = docs_remote_path / "manual" / locale | ||||
|         version_remote_path = remote_path / version | ||||
|         worker.utils.call_ssh(connect_id, ["mkdir", "-p", version_remote_path]) | ||||
| 
 | ||||
|         if doc_format == "html": | ||||
|             # Sync html files | ||||
|             source_path = f"{builder.build_path}/{doc_format}/{locale}/" | ||||
|             dest_path = f"{connect_id}:{version_remote_path}/" | ||||
|             # Exclude packaged download files; these get synced with `needs_package_delivery`. | ||||
|             worker.utils.rsync( | ||||
|                 source_path, | ||||
|                 dest_path, | ||||
|                 exclude_paths=[".doctrees", "blender_manual_*.zip"], | ||||
|                 delete=True, | ||||
|                 delete_path_check=str(version_remote_path), | ||||
|             ) | ||||
| 
 | ||||
|             # Create links | ||||
|             if builder.track_id == "vdev": | ||||
|                 worker.utils.info(f"Creating links for {locale}") | ||||
|                 worker.utils.call_ssh( | ||||
|                     connect_id, | ||||
|                     ["ln", "-svF", remote_path / dev_version, remote_path / "dev"], | ||||
|                 ) | ||||
|                 worker.utils.call_ssh( | ||||
|                     connect_id, | ||||
|                     [ | ||||
|                         "ln", | ||||
|                         "-svF", | ||||
|                         remote_path / latest_version, | ||||
|                         remote_path / "latest", | ||||
|                     ], | ||||
|                 ) | ||||
| 
 | ||||
|         if builder.needs_package_delivery: | ||||
|             # Sync zip package | ||||
|             worker.utils.info(f"Syncing package for {locale}") | ||||
|             build_package_path = builder.build_path / "package" | ||||
|             package_file_name = f"blender_manual_{doc_format}.zip" | ||||
|             source_path = build_package_path / locale / package_file_name | ||||
|             dest_path = f"{connect_id}:{version_remote_path}/{package_file_name}" | ||||
|             worker.utils.rsync(source_path, dest_path, exclude_paths=[".doctrees"]) | ||||
| 
 | ||||
|     # Create and sync versions.json | ||||
|     worker.utils.info("Creating and syncing versions.json") | ||||
| 
 | ||||
|     doc_version_labels = branches_config.doc_manual_version_labels | ||||
|     versions_path = builder.build_path / "versions.json" | ||||
|     versions_path.write_text(json.dumps(doc_version_labels, indent=2)) | ||||
|     worker.utils.info(versions_path.read_text()) | ||||
| 
 | ||||
|     dest_path = f"{connect_id}:{docs_remote_path}/versions.json" | ||||
|     worker.utils.rsync(versions_path, dest_path) | ||||
| 
 | ||||
| 
 | ||||
| def clean(builder: ManualBuilder) -> None: | ||||
|     worker.utils.remove_dir(builder.build_path) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["configure-machine"] = worker.configure.configure_machine | ||||
|     steps["update"] = update | ||||
|     steps["check"] = check | ||||
|     steps["compile"] = compile_doc | ||||
|     steps["package"] = package | ||||
|     steps["deliver"] = deliver | ||||
|     steps["clean"] = clean | ||||
| 
 | ||||
|     parser = worker.utils.create_argument_parser(steps=steps) | ||||
|     parser.add_argument("--needs-all-locales", action="store_true", required=False) | ||||
|     parser.add_argument("--needs-package-delivery", action="store_true", required=False) | ||||
|     parser.add_argument( | ||||
|         "--doc-format", | ||||
|         default="html", | ||||
|         type=str, | ||||
|         required=False, | ||||
|         choices=["html", "epub"], | ||||
|     ) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = ManualBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
98  config/worker/doc_studio.py  (Executable file)
							|  | @ -0,0 +1,98 @@ | |||
| #!/usr/bin/env python3 | ||||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| import argparse | ||||
| import os | ||||
| import pathlib | ||||
| import sys | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| 
 | ||||
| sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) | ||||
| 
 | ||||
| import worker.utils | ||||
| 
 | ||||
| 
 | ||||
| class DocStudioBuilder(worker.utils.Builder): | ||||
|     def __init__(self, args: argparse.Namespace): | ||||
|         super().__init__(args, "studio", "blender-studio-tools") | ||||
|         self.setup_track_path() | ||||
| 
 | ||||
| 
 | ||||
| def update(builder: worker.utils.Builder) -> None: | ||||
|     builder.update_source(update_submodules=True) | ||||
| 
 | ||||
| 
 | ||||
| def compile_doc(builder: worker.utils.Builder) -> None: | ||||
|     docs_path = builder.code_path / "docs" | ||||
|     os.chdir(docs_path) | ||||
| 
 | ||||
|     worker.utils.call(["npm", "install"]) | ||||
|     worker.utils.call(["npm", "run", "docs:build"]) | ||||
| 
 | ||||
| 
 | ||||
| def deliver(builder: worker.utils.Builder) -> None: | ||||
|     dry_run = False | ||||
|     if builder.service_env_id not in ("PROD", "LOCAL"): | ||||
|         worker.utils.warning("Delivery from non-PROD is dry run only") | ||||
|         dry_run = True | ||||
| 
 | ||||
|     worker_config = builder.get_worker_config() | ||||
|     connect_id = f"{worker_config.studio_user}@{worker_config.studio_machine}" | ||||
|     change_modes = ["D0755", "F0644"] | ||||
| 
 | ||||
|     if builder.service_env_id == "LOCAL" and builder.platform == "darwin": | ||||
|         worker.utils.warning( | ||||
|             "rsync change_owner not supported on darwin, ignoring for LOCAL" | ||||
|         ) | ||||
|         change_owner = None | ||||
|     else: | ||||
|         change_owner = "buildbot:www-data" | ||||
| 
 | ||||
|     # Content of the website. | ||||
|     docs_local_path = builder.code_path / "docs" / ".vitepress" / "dist" | ||||
|     docs_remote_path = pathlib.Path(worker_config.studio_folder) | ||||
| 
 | ||||
|     docs_source_path = f"{docs_local_path}/" | ||||
|     docs_dest_path = f"{connect_id}:{docs_remote_path}/" | ||||
|     worker.utils.rsync( | ||||
|         docs_source_path, | ||||
|         docs_dest_path, | ||||
|         change_modes=change_modes, | ||||
|         change_owner=change_owner, | ||||
|         port=worker_config.studio_port, | ||||
|         dry_run=dry_run, | ||||
|     ) | ||||
| 
 | ||||
|     # Downloadable artifacts. | ||||
|     artifacts_local_path = builder.code_path / "dist" | ||||
|     artifacts_remote_path = docs_remote_path / "download" | ||||
|     if artifacts_local_path.exists(): | ||||
|         artifacts_source_path = f"{artifacts_local_path}/" | ||||
|         artifact_dest_path = f"{connect_id}:{artifacts_remote_path}/" | ||||
|         worker.utils.rsync( | ||||
|             artifacts_source_path, | ||||
|             artifact_dest_path, | ||||
|             change_modes=change_modes, | ||||
|             change_owner=change_owner, | ||||
|             port=worker_config.studio_port, | ||||
|             dry_run=dry_run, | ||||
|         ) | ||||
|     else: | ||||
|         worker.utils.info("No downloadable artifacts to be copied over") | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
|     steps: worker.utils.BuilderSteps = OrderedDict() | ||||
|     steps["update"] = update | ||||
|     steps["compile"] = compile_doc | ||||
|     steps["deliver"] = deliver | ||||
| 
 | ||||
|     parser = worker.utils.create_argument_parser(steps=steps) | ||||
|     parser.add_argument("--needs-package-delivery", action="store_true", required=False) | ||||
| 
 | ||||
|     args = parser.parse_args() | ||||
|     builder = DocStudioBuilder(args) | ||||
|     builder.run(args.step, steps) | ||||
583  config/worker/utils.py  (Normal file)
							|  | @ -0,0 +1,583 @@ | |||
| # SPDX-License-Identifier: GPL-2.0-or-later | ||||
| # SPDX-FileCopyrightText: 2011-2024 Blender Authors | ||||
| # <pep8 compliant> | ||||
| 
 | ||||
| ## Utility functions used by all builders. | ||||
| 
 | ||||
| import argparse | ||||
| import atexit | ||||
| import logging | ||||
| import os | ||||
| import pathlib | ||||
| import platform | ||||
| import re | ||||
| import shutil | ||||
| import subprocess | ||||
| import sys | ||||
| import time | ||||
| 
 | ||||
| from collections import OrderedDict | ||||
| from typing import Any, Callable, Dict, List, Optional, Sequence, Union | ||||
| 
 | ||||
| # Logging | ||||
| _error_pattern = re.compile( | ||||
|     r"(^FATAL:|^ERROR:|^ERROR!|^Unhandled Error|^Traceback| error: | error | Error |FAILED: |ninja: build stopped: subcommand failed|CMake Error|SEGFAULT|Exception: SegFault |The following tests FAILED:|\*\*\*Failed|\*\*\*Exception|\*\*\*Abort|^fatal:)" | ||||
| ) | ||||
| _warning_pattern = re.compile( | ||||
|     r"(^WARNING:|^WARNING!|^WARN |Warning: | warning: | warning |warning | nvcc warning :|CMake Warning)" | ||||
| ) | ||||
| _ignore_pattern = re.compile( | ||||
|     r"(SignTool Error: CryptSIPRemoveSignedDataMsg returned error: 0x00000057|unknown mimetype for .*doctree)" | ||||
| ) | ||||
| 
 | ||||
| _errors: List[str] = [] | ||||
| _warnings: List[str] = [] | ||||
| 
 | ||||
| 
 | ||||
| def _print_warning(msg: str) -> None: | ||||
|     print("\033[33m" + msg + "\033[0m", flush=True) | ||||
| 
 | ||||
| 
 | ||||
| def _print_error(msg: str) -> None: | ||||
|     print("\033[31m" + msg + "\033[0m", flush=True) | ||||
| 
 | ||||
| 
 | ||||
| def _print_cmd(msg: str) -> None: | ||||
|     print("\033[32m" + msg + "\033[0m", flush=True) | ||||
| 
 | ||||
| 
 | ||||
| def _exit_handler() -> None: | ||||
|     if len(_warnings): | ||||
|         print("") | ||||
|         print("=" * 80) | ||||
|         print("WARNING Summary:") | ||||
|         print("=" * 80) | ||||
|         for msg in _warnings: | ||||
|             _print_warning(msg) | ||||
|     if len(_errors): | ||||
|         print("") | ||||
|         print("=" * 80) | ||||
|         print("ERROR Summary:") | ||||
|         print("=" * 80) | ||||
|         for msg in _errors: | ||||
|             _print_error(msg) | ||||
| 
 | ||||
| 
 | ||||
| atexit.register(_exit_handler) | ||||
| 
 | ||||
| 
 | ||||
| def info(msg: str) -> None: | ||||
|     print("INFO: " + msg, flush=True) | ||||
| 
 | ||||
| 
 | ||||
| def warning(msg: str) -> None: | ||||
|     _print_warning("WARN: " + msg) | ||||
|     global _warnings | ||||
|     _warnings += [msg] | ||||
| 
 | ||||
| 
 | ||||
| def error(msg: str) -> None: | ||||
|     _print_error("ERROR: " + msg) | ||||
|     global _errors | ||||
|     _errors += [msg] | ||||
| 
 | ||||
| 
 | ||||
| def exception(e: BaseException) -> None: | ||||
|     logging.exception(e) | ||||
|     global _errors | ||||
|     _errors += [str(e)] | ||||
| 
 | ||||
| 
 | ||||
| def _log_cmd(msg: str) -> None: | ||||
|     if re.search(_error_pattern, msg): | ||||
|         if not re.search(_ignore_pattern, msg): | ||||
|             _print_error(msg) | ||||
|             global _errors | ||||
|             _errors += [msg] | ||||
|             return | ||||
|     elif re.search(_warning_pattern, msg): | ||||
|         if not re.search(_ignore_pattern, msg): | ||||
|             _print_warning(msg) | ||||
|             global _warnings | ||||
|             _warnings += [msg] | ||||
|             return | ||||
| 
 | ||||
|     print(msg.encode("ascii", errors="replace").decode("ascii"), flush=True) | ||||
| 
 | ||||
| 
 | ||||
| # Command execution | ||||
| class HiddenArgument: | ||||
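|     """Wrap a command argument so it is passed to the process but left out | ||||
|     of the logged command line (used for ssh/rsync connect IDs below).""" | ||||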
|     def __init__(self, value: Union[str, pathlib.Path]): | ||||
|         self.value = value | ||||
| 
 | ||||
| 
 | ||||
| CmdArgument = Union[str, pathlib.Path, HiddenArgument, Any] | ||||
| CmdList = List[CmdArgument] | ||||
| CmdSequence = Sequence[CmdArgument] | ||||
| CmdFilterOutput = Optional[Callable[[str], Optional[str]]] | ||||
| CmdEnvironment = Optional[Dict[str, str]] | ||||
| 
 | ||||
| 
 | ||||
| def _prepare_call( | ||||
|     cmd: CmdSequence, dry_run: bool = False | ||||
| ) -> Sequence[Union[str, pathlib.Path]]: | ||||
|     real_cmd: List[Union[str, pathlib.Path]] = [] | ||||
|     log_cmd: List[str] = [] | ||||
| 
 | ||||
|     for arg in cmd: | ||||
|         if isinstance(arg, HiddenArgument): | ||||
|             real_cmd += [arg.value] | ||||
|         else: | ||||
|             log_cmd += [str(arg)] | ||||
|             real_cmd += [arg] | ||||
| 
 | ||||
|     if dry_run: | ||||
|         info(f"Dry run command in path [{os.getcwd()}]") | ||||
|     else: | ||||
|         info(f"Run command in path [{os.getcwd()}]") | ||||
|     _print_cmd(" ".join(log_cmd)) | ||||
| 
 | ||||
|     return real_cmd | ||||
| 
 | ||||
| 
 | ||||
| def call( | ||||
|     cmd: CmdSequence, | ||||
|     env: CmdEnvironment = None, | ||||
|     exit_on_error: bool = True, | ||||
|     filter_output: CmdFilterOutput = None, | ||||
|     retry_count: int = 0, | ||||
|     retry_wait_time: float = 1.0, | ||||
|     dry_run: bool = False, | ||||
| ) -> int: | ||||
|     cmd = _prepare_call(cmd, dry_run) | ||||
|     if dry_run: | ||||
|         return 0 | ||||
| 
 | ||||
|     for try_count in range(0, retry_count + 1): | ||||
|         # Flush to ensure correct order output on Windows. | ||||
|         sys.stdout.flush() | ||||
|         sys.stderr.flush() | ||||
| 
 | ||||
|         proc = subprocess.Popen( | ||||
|             cmd, | ||||
|             env=env, | ||||
|             bufsize=1, | ||||
|             stdout=subprocess.PIPE, | ||||
|             stderr=subprocess.STDOUT, | ||||
|             universal_newlines=True, | ||||
|             encoding="utf-8", | ||||
|             errors="ignore", | ||||
|         ) | ||||
|         while True: | ||||
|             if not proc.stdout: | ||||
|                 break | ||||
| 
 | ||||
|             line = proc.stdout.readline() | ||||
|             if line: | ||||
|                 line_str: Optional[str] = line.strip("\n\r") | ||||
|                 # The filter may rewrite the line, or return None to drop it. | ||||
|                 if filter_output: | ||||
|                     line_str = filter_output(line_str) | ||||
|                 if line_str: | ||||
|                     _log_cmd(line_str) | ||||
|             else: | ||||
|                 break | ||||
| 
 | ||||
|         proc.communicate() | ||||
| 
 | ||||
|         if proc.returncode == 0: | ||||
|             return 0 | ||||
| 
 | ||||
|         if try_count == retry_count: | ||||
|             if exit_on_error: | ||||
|                 sys.exit(proc.returncode) | ||||
|             return proc.returncode | ||||
|         else: | ||||
|             warning("Command failed, retrying") | ||||
|             time.sleep(retry_wait_time) | ||||
| 
 | ||||
|     return -1 | ||||
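| 
| # Example (sketch): retry a transient network failure before giving up: | ||||
| #   call(["git", "fetch", "origin"], retry_count=3, retry_wait_time=2.0) | ||||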
| 
 | ||||
| 
 | ||||
| def check_output(cmd: CmdSequence, exit_on_error: bool = True) -> str: | ||||
|     cmd = _prepare_call(cmd) | ||||
| 
 | ||||
|     # Flush to ensure correct order output on Windows. | ||||
|     sys.stdout.flush() | ||||
|     sys.stderr.flush() | ||||
| 
 | ||||
|     try: | ||||
|         output = subprocess.check_output( | ||||
|             cmd, stderr=subprocess.STDOUT, universal_newlines=True | ||||
|         ) | ||||
|     except subprocess.CalledProcessError as e: | ||||
|         if exit_on_error: | ||||
|             sys.exit(e.returncode) | ||||
|         output = "" | ||||
| 
 | ||||
|     return output.strip() | ||||
| 
 | ||||
| 
 | ||||
| def call_pipenv( | ||||
|     cmd: CmdSequence, filter_output: CmdFilterOutput = None, dry_run: bool = False | ||||
| ) -> int: | ||||
|     cmd_prefix: CmdList = ["pipenv"] | ||||
|     return call(cmd_prefix + list(cmd), filter_output=filter_output, dry_run=dry_run) | ||||
| 
 | ||||
| 
 | ||||
| def call_ssh(connect_id: str, cmd: CmdSequence, dry_run: bool = False) -> int: | ||||
|     ssh_cmd = [ | ||||
|         "ssh", | ||||
|         "-o", | ||||
|         "ConnectTimeout=20", | ||||
|         HiddenArgument(connect_id), | ||||
|         " ".join([str(arg) for arg in cmd]), | ||||
|     ] | ||||
|     return call(ssh_cmd, retry_count=3, dry_run=dry_run) | ||||
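| 
| # Example (sketch, hypothetical host): create a remote directory, with retries: | ||||
| #   call_ssh("buildbot@docs.example.org", ["mkdir", "-p", "/srv/docs"]) | ||||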
| 
 | ||||
| 
 | ||||
| def rsync( | ||||
|     source_path: Union[pathlib.Path, str], | ||||
|     dest_path: Union[pathlib.Path, str], | ||||
|     exclude_paths: Sequence[str] = [], | ||||
|     include_paths: Sequence[str] = [], | ||||
|     change_modes: Sequence[str] = [], | ||||
|     change_owner: Optional[str] = None, | ||||
|     show_names: bool = False, | ||||
|     delete: bool = False, | ||||
|     delete_path_check: Optional[str] = None, | ||||
|     dry_run: bool = False, | ||||
|     port: int = 22, | ||||
|     retry_count: int = 3, | ||||
| ) -> int: | ||||
|     # Extra check on path, because delete is risky if pointed at a | ||||
|     # root folder that contains other data. | ||||
|     if delete: | ||||
|         if not delete_path_check: | ||||
|             raise Exception("Rsync: delete requires delete_path_check") | ||||
|         if str(dest_path).find(delete_path_check) == -1: | ||||
|             raise Exception("Rsync: remote path must contain '{delete_path_check}'") | ||||
| 
 | ||||
|     cmd: List[Union[str, pathlib.Path, HiddenArgument]] = [ | ||||
|         "rsync", | ||||
|         # SSH options | ||||
|         "-e", | ||||
|         f"ssh -o ConnectTimeout=20 -p {port}", | ||||
|         # The -rlpgoDv options below are equivalent to --archive apart from updating | ||||
|         # the timestamp of the files on the receiving side. This should prevent them | ||||
|         # from getting marked for zfs-snapshots. | ||||
|         "--timeout=60", | ||||
|         "--checksum", | ||||
|         "-rlpgoDv", | ||||
|         "--partial", | ||||
|     ] | ||||
|     if change_owner: | ||||
|         cmd += [f"--chown={change_owner}"] | ||||
|     if delete: | ||||
|         cmd += ["--delete"] | ||||
|     # cmd += [f"--info={info_options}"] | ||||
|     cmd += [f"--include={item}" for item in include_paths] | ||||
|     cmd += [f"--exclude={item}" for item in exclude_paths] | ||||
|     cmd += [f"--chmod={item}" for item in change_modes] | ||||
| 
 | ||||
|     cmd += [source_path] | ||||
|     cmd += [HiddenArgument(dest_path)] | ||||
| 
 | ||||
|     return call(cmd, retry_count=retry_count, dry_run=dry_run) | ||||
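| 
| # Example (sketch, hypothetical paths): mirror built HTML; --delete is only | ||||
| # allowed because the destination contains the delete_path_check substring: | ||||
| #   rsync("build/html/", "user@host:/srv/docs/manual/", delete=True, | ||||
| #         delete_path_check="/srv/docs") | ||||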
| 
 | ||||
| 
 | ||||
| def move(path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False) -> None: | ||||
|     if dry_run: | ||||
|         return | ||||
|     # str() works around typing bug in Python 3.6. | ||||
|     shutil.move(str(path_from), path_to) | ||||
| 
 | ||||
| 
 | ||||
| def copy_dir( | ||||
|     path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False | ||||
| ) -> None: | ||||
|     if dry_run: | ||||
|         return | ||||
|     shutil.copytree(path_from, path_to) | ||||
| 
 | ||||
| 
 | ||||
| def copy_file( | ||||
|     path_from: pathlib.Path, path_to: pathlib.Path, dry_run: bool = False | ||||
| ) -> None: | ||||
|     if dry_run: | ||||
|         return | ||||
|     shutil.copy2(path_from, path_to) | ||||
| 
 | ||||
| 
 | ||||
| def remove_file( | ||||
|     path: pathlib.Path, | ||||
|     retry_count: int = 3, | ||||
|     retry_wait_time: float = 5.0, | ||||
|     dry_run: bool = False, | ||||
| ) -> None: | ||||
|     if not path.exists(): | ||||
|         return | ||||
|     if dry_run: | ||||
|         info(f"Removing {path} (dry run)") | ||||
|         return | ||||
| 
 | ||||
|     info(f"Removing {path}") | ||||
|     for try_count in range(retry_count): | ||||
|         try: | ||||
|             if path.exists(): | ||||
|                 path.unlink() | ||||
|             return | ||||
|         except FileNotFoundError: | ||||
|             # File was already removed by another process. | ||||
|             return | ||||
|         except PermissionError as e: | ||||
|             warning(f"Permission error when removing {path}: {e}") | ||||
|             time.sleep(retry_wait_time) | ||||
|         except OSError as e: | ||||
|             warning(f"OS error when removing {path}: {e}") | ||||
|             time.sleep(retry_wait_time) | ||||
| 
 | ||||
|     # Final attempt outside the retry loop | ||||
|     try: | ||||
|         if path.exists(): | ||||
|             path.unlink() | ||||
|     except FileNotFoundError: | ||||
|         pass | ||||
|     except PermissionError as e: | ||||
|         error(f"Failed to remove {path} due to permission issues: {e}") | ||||
|     except OSError as e: | ||||
|         error(f"Failed to remove {path} after retries due to OS error: {e}") | ||||
| 
 | ||||
| 
 | ||||
| # Retry several times by default, giving a possible antivirus scanner a chance | ||||
| # to release its lock on files in the build folder. This happened, for example, | ||||
| # with MSI files on Windows. | ||||
| def remove_dir( | ||||
|     path: pathlib.Path, retry_count: int = 3, retry_wait_time: float = 5.0 | ||||
| ) -> None: | ||||
|     for try_count in range(retry_count): | ||||
|         try: | ||||
|             if path.exists(): | ||||
|                 shutil.rmtree(path) | ||||
|             return  # Successfully removed, no need to retry | ||||
|         except PermissionError as e: | ||||
|             if platform.system().lower() == "windows": | ||||
|                 # Debugging access denied errors on Windows | ||||
|                 if path.name == "build_package": | ||||
|                     info("Removal of package artifacts folder failed. Investigating...") | ||||
|                     msi_path = ( | ||||
|                         path | ||||
|                         / "_CPack_Packages" | ||||
|                         / "Windows" | ||||
|                         / "WIX" | ||||
|                         / "blender-windows64.msi" | ||||
|                     ) | ||||
|                     if msi_path.exists(): | ||||
|                         info(f"Information about [{msi_path}]") | ||||
|                         call(["handle64", msi_path], exit_on_error=False) | ||||
|                         call( | ||||
|                             ["pwsh", "-command", f"Get-Item {msi_path} | Format-List"], | ||||
|                             exit_on_error=False, | ||||
|                         ) | ||||
|                         call( | ||||
|                             ["pwsh", "-command", f"Get-Acl {msi_path} | Format-List"], | ||||
|                             exit_on_error=False, | ||||
|                         ) | ||||
|                     else: | ||||
|                         info(f"MSI package file [{msi_path}] does not exist") | ||||
|             warning(f"Permission error when removing {path}: {e}") | ||||
|             time.sleep(retry_wait_time) | ||||
|         except FileNotFoundError: | ||||
|             # The directory is already gone; no action needed. | ||||
|             return | ||||
|         except OSError as e: | ||||
|             warning(f"OS error when attempting to remove {path}: {e}") | ||||
|             time.sleep(retry_wait_time) | ||||
| 
 | ||||
|     # Final attempt outside of retries | ||||
|     if path.exists(): | ||||
|         try: | ||||
|             shutil.rmtree(path) | ||||
|         except PermissionError as e: | ||||
|             error(f"Failed to remove {path} due to permission issues: {e}") | ||||
|         except OSError as e: | ||||
|             error(f"Failed to remove {path} after retries due to OS error: {e}") | ||||
| 
 | ||||
| 
 | ||||
| def is_tool(name: Union[str, pathlib.Path]) -> bool: | ||||
|     """Check whether `name` is on PATH and marked as executable.""" | ||||
|     return shutil.which(name) is not None | ||||
| 
 | ||||
| 
 | ||||
| # Update source code from git repository. | ||||
| def update_source( | ||||
|     app_org: str, | ||||
|     app_id: str, | ||||
|     code_path: pathlib.Path, | ||||
|     branch_id: str = "main", | ||||
|     patch_id: Optional[str] = None, | ||||
|     commit_id: Optional[str] = None, | ||||
|     update_submodules: bool = False, | ||||
| ) -> None: | ||||
|     repo_url = f"https://projects.blender.org/{app_org}/{app_id}.git" | ||||
| 
 | ||||
|     if not code_path.exists(): | ||||
|         # Clone new | ||||
|         info(f"Cloning {repo_url}") | ||||
|         call(["git", "clone", "--progress", repo_url, code_path]) | ||||
|     else: | ||||
|         for index_lock_path in code_path.rglob(".git/index.lock"): | ||||
|             warning("Removing git lock, probably left behind by killed git process") | ||||
|             remove_file(index_lock_path) | ||||
|         for index_lock_path in (code_path / ".git" / "modules").rglob("index.lock"): | ||||
|             warning( | ||||
|                 "Removing submodule git lock, probably left behind by killed git process" | ||||
|             ) | ||||
|             remove_file(index_lock_path) | ||||
| 
 | ||||
|     os.chdir(code_path) | ||||
| 
 | ||||
|     # Fix error: "fatal: bad object refs/remotes/origin/HEAD" | ||||
|     call(["git", "remote", "set-head", "origin", "--auto"]) | ||||
| 
 | ||||
|     # Change to new Gitea URL. | ||||
|     call(["git", "remote", "set-url", "origin", repo_url]) | ||||
|     call(["git", "submodule", "sync"]) | ||||
| 
 | ||||
|     # Fetch and clean | ||||
|     call(["git", "fetch", "origin", "--prune"]) | ||||
|     call(["git", "clean", "-f", "-d"]) | ||||
|     call(["git", "reset", "--hard"]) | ||||
| 
 | ||||
|     rebase_merge_path = code_path / ".git" / "rebase-merge" | ||||
|     if rebase_merge_path.exists(): | ||||
|         info(f"Path {rebase_merge_path} exists, removing !") | ||||
|         shutil.rmtree(rebase_merge_path) | ||||
| 
 | ||||
|     if patch_id: | ||||
|         # Pull request. | ||||
|         pull_request_id = patch_id | ||||
|         branch_name = f"PR{pull_request_id}" | ||||
| 
 | ||||
|         # Checkout pull request into PR123 branch. | ||||
|         call(["git", "checkout", "main"]) | ||||
|         call( | ||||
|             [ | ||||
|                 "git", | ||||
|                 "fetch", | ||||
|                 "-f", | ||||
|                 "origin", | ||||
|                 f"pull/{pull_request_id}/head:{branch_name}", | ||||
|             ] | ||||
|         ) | ||||
|         call(["git", "checkout", branch_name]) | ||||
| 
 | ||||
|         if commit_id and (commit_id != "HEAD"): | ||||
|             call(["git", "reset", "--hard", commit_id]) | ||||
|     else: | ||||
|         # Branch. | ||||
|         call(["git", "checkout", branch_id]) | ||||
| 
 | ||||
|         if commit_id and (commit_id != "HEAD"): | ||||
|             call(["git", "reset", "--hard", commit_id]) | ||||
|         else: | ||||
|             call(["git", "reset", "--hard", "origin/" + branch_id]) | ||||
| 
 | ||||
|     if update_submodules: | ||||
|         call(["git", "submodule", "init"]) | ||||
| 
 | ||||
|     # Resolve potential issues with submodules even if other code | ||||
|     # is responsible for updating them. | ||||
|     call(["git", "submodule", "foreach", "git", "clean", "-f", "-d"]) | ||||
|     call(["git", "submodule", "foreach", "git", "reset", "--hard"]) | ||||
| 
 | ||||
|     if update_submodules: | ||||
|         call(["git", "submodule", "update"]) | ||||
| 
 | ||||
| 
 | ||||
| # Workaround for missing type info in Python 3.8. | ||||
| if sys.version_info >= (3, 9): | ||||
|     BuilderSteps = OrderedDict[str, Callable[[Any], None]] | ||||
| else: | ||||
|     BuilderSteps = Any | ||||
| 
 | ||||
| 
 | ||||
| class Builder: | ||||
|     def __init__(self, args: argparse.Namespace, app_org: str, app_id: str): | ||||
|         self.service_env_id = args.service_env_id | ||||
|         self.track_id = args.track_id | ||||
|         self.branch_id = args.branch_id | ||||
|         self.patch_id = args.patch_id | ||||
|         self.commit_id = args.commit_id | ||||
|         self.platform = platform.system().lower() | ||||
|         self.architecture = platform.machine().lower() | ||||
|         self.app_org = app_org | ||||
|         self.app_id = app_id | ||||
| 
 | ||||
|         if not self.branch_id: | ||||
|             branches_config = self.get_branches_config() | ||||
|             self.branch_id = branches_config.track_code_branches[self.track_id] | ||||
| 
 | ||||
|         self.tracks_root_path = self.get_worker_config().tracks_root_path | ||||
|         self.track_path = self.tracks_root_path / (self.app_id + "-" + self.track_id) | ||||
|         self.code_path = self.track_path / (self.app_id + ".git") | ||||
| 
 | ||||
|         info(f"Setting up builder paths from [{self.track_path}]") | ||||
| 
 | ||||
|     def setup_track_path(self) -> None: | ||||
|         # Create track directory if it doesn't exist already. | ||||
|         os.makedirs(self.track_path, exist_ok=True) | ||||
|         os.chdir(self.track_path) | ||||
| 
 | ||||
|         # Clean up any existing pipenv files. | ||||
|         remove_file(self.track_path / "Pipfile") | ||||
|         remove_file(self.track_path / "Pipfile.lock") | ||||
|         remove_file(self.code_path / "Pipfile") | ||||
|         remove_file(self.code_path / "Pipfile.lock") | ||||
| 
 | ||||
|     def update_source(self, update_submodules: bool = False) -> None: | ||||
|         update_source( | ||||
|             self.app_org, | ||||
|             self.app_id, | ||||
|             self.code_path, | ||||
|             branch_id=self.branch_id, | ||||
|             patch_id=self.patch_id, | ||||
|             commit_id=self.commit_id, | ||||
|             update_submodules=update_submodules, | ||||
|         ) | ||||
| 
 | ||||
|     def run(self, step: str, steps: BuilderSteps) -> None: | ||||
|         try: | ||||
|             if step == "all": | ||||
|                 for func in steps.values(): | ||||
|                     func(self) | ||||
|             else: | ||||
|                 steps[step](self) | ||||
|         except Exception as e: | ||||
|             exception(e) | ||||
|             sys.exit(1) | ||||
| 
 | ||||
|     def get_worker_config(self) -> Any: | ||||
|         import conf.worker | ||||
| 
 | ||||
|         return conf.worker.get_config(self.service_env_id) | ||||
| 
 | ||||
|     def get_branches_config(self) -> Any: | ||||
|         import conf.branches | ||||
| 
 | ||||
|         return conf.branches | ||||
| 
 | ||||
| 
 | ||||
| def create_argument_parser(steps: BuilderSteps) -> argparse.ArgumentParser: | ||||
|     parser = argparse.ArgumentParser() | ||||
|     parser.add_argument("--service-env-id", type=str, required=False, default="LOCAL") | ||||
|     parser.add_argument("--track-id", default="vdev", type=str, required=False) | ||||
|     parser.add_argument("--branch-id", default="", type=str, required=False) | ||||
|     parser.add_argument("--patch-id", default="", type=str, required=False) | ||||
|     parser.add_argument("--commit-id", default="", type=str, required=False) | ||||
|     all_steps = list(steps.keys()) + ["all"] | ||||
|     parser.add_argument("step", choices=all_steps) | ||||
|     return parser | ||||
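| 
| # Example (sketch): worker scripts built on this parser accept, e.g.: | ||||
| #   ./doc_manual.py --service-env-id=LOCAL --track-id=vdev --doc-format=html all | ||||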
|  | @ -1,5 +1,24 @@ | |||
| services: | ||||
|   buildbot-master: | ||||
|     env_file: .env | ||||
|     env_file: .env.local | ||||
|     volumes: | ||||
|       - ./config:/buildbot/config | ||||
| 
 | ||||
|   buildbot-worker: | ||||
|     image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' | ||||
|     restart: unless-stopped | ||||
|     environment: | ||||
|       - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' | ||||
|       - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' | ||||
|       - 'WORKERNAME=${WORKERNAME:-example-worker}' | ||||
|       - 'WORKERPASS=${WORKERPASS:-pass}' | ||||
|       - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' | ||||
|     healthcheck: | ||||
|       test: | ||||
|         - CMD | ||||
|         - curl | ||||
|         - '-f' | ||||
|         - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' | ||||
|       interval: 5s | ||||
|       timeout: 20s | ||||
|       retries: 10 | ||||
|  | @ -18,7 +18,7 @@ services: | |||
|         - CMD | ||||
|         - curl | ||||
|         - '-f' | ||||
|         - 'http://localhost:$${BUILDBOT_WEB_PORT}' | ||||
|         - '$${BUILDBOT_WEB_URL}' | ||||
|       interval: 2s | ||||
|       timeout: 10s | ||||
|       retries: 15 | ||||
|  | @ -41,23 +41,5 @@ services: | |||
|       - 'POSTGRES_PASSWORD=${SERVICE_PASSWORD_POSTGRESQL}' | ||||
|       - 'POSTGRES_USER=${SERVICE_USER_POSTGRESQL}' | ||||
|       - 'POSTGRES_DB=${POSTGRES_DB:-buildbot}' | ||||
|   buildbot-worker: | ||||
|     image: 'buildbot/buildbot-worker:${BUILDBOT_IMAGE_TAG:-v4.1.0}' | ||||
|     restart: unless-stopped | ||||
|     environment: | ||||
|       - 'BUILDMASTER=${BUILDMASTER:-buildbot-master}' | ||||
|       - 'BUILDMASTER_PORT=${BUILDBOT_WORKER_PORT:-9989}' | ||||
|       - 'WORKERNAME=${WORKERNAME:-example-worker}' | ||||
|       - 'WORKERPASS=${WORKERPASS:-pass}' | ||||
|       - 'WORKER_ENVIRONMENT_BLACKLIST=${WORKER_ENVIRONMENT_BLACKLIST:-DOCKER_BUILDBOT* BUILDBOT_ENV_* BUILDBOT_1* WORKER_ENVIRONMENT_BLACKLIST}' | ||||
|     healthcheck: | ||||
|       test: | ||||
|         - CMD | ||||
|         - curl | ||||
|         - '-f' | ||||
|         - 'http://$${BUILDMASTER}:$${BUILDMASTER_PORT}' | ||||
|       interval: 5s | ||||
|       timeout: 20s | ||||
|       retries: 10 | ||||
| volumes: | ||||
|   buildbot-db: {} | ||||