#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2011-2024 Blender Authors
# <pep8 compliant>

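"""Maintain the buildbot download folder for Blender and bpy packages.

Deduplicates current builds so only the newest of each group is kept,
moves superseded builds into an "archive" subfolder, and purges archived
builds and test artifacts once they exceed their retention period."""
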
import argparse
import datetime
import os
import pathlib
import random
import re
import sys
import time

from collections import OrderedDict
from typing import Any, Dict, List, Optional, Sequence, Union

sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))

import worker.utils

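# Package file names encode their build metadata, e.g. (illustrative):
#   blender-4.2.0-stable+main.0123abcd-linux.x86_64-release.tar.xz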
package_file_pattern = re.compile(
    r"^(?P<app_id>(blender|bpy))\-"
    + r"(?P<version_id>[0-9]+\.[0-9]+\.[0-9]+)\-"
    + r"(?P<risk_id>[a-z]+)\+"
    + r"(?P<branch_id>[A-Za-z0-9_\-]+)\."
    + r"(?P<commit_hash>[a-fA-F0-9]+)\-"
    + r"(?P<platform_id>[A-Za-z0-9_]+)\."
    + r"(?P<architecture>[A-Za-z0-9_]+)\-"
    + r"(?P<build_configuration>(release|asserts|sanitizer|debug))\."
    + r"(?P<file_extension>[A-Za-z0-9\.]+)"
)

pipeline_types = ["daily", "experimental", "patch"]
platforms = ["linux", "windows", "darwin"]
architectures = ["x86_64", "amd64", "arm64"]
build_configurations = ["release", "asserts", "sanitizer", "debug"]


class ArchiveBuilder(worker.utils.Builder):
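    """Builder holding the archive/purge settings parsed from the command line."""
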
    def __init__(self, args: argparse.Namespace):
        super().__init__(args, "blender", "blender")
        self.pipeline_type = args.pipeline_type
        self.platform_id = args.platform_id
        self.architecture = args.architecture
        self.build_configuration = args.build_configuration
        self.python_module = args.python_module
        self.dry_run = args.dry_run
        self.retention_in_days = args.retention_in_days


def file_age_in_days(file_path: pathlib.Path) -> float:
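    """Return the file's age in days based on its modification time, or 0.0 if it cannot be accessed."""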
    try:
        file_path_mtime = os.path.getmtime(file_path)
    except OSError:
        return 0.0

    age_in_seconds = time.time() - file_path_mtime
    return age_in_seconds / (3600.0 * 24.0)


def parse_build_info(file_path: pathlib.Path) -> Optional[Dict]:
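    """Parse build metadata out of a package file name, or return None if the name does not match the expected pattern."""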
    matches = package_file_pattern.match(file_path.name)
    if not matches:
        return None

    build_info: Dict[str, Union[str, float, pathlib.Path]] = dict(matches.groupdict())
    build_info["file_age_in_days"] = file_age_in_days(file_path)
    build_info["file_path"] = file_path
    return build_info


def archive_build(file_path: pathlib.Path, dry_run: bool) -> None:
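    """Move a build and its .sha256 checksum into an "archive" subfolder, replacing any previous archived copy."""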
    # Archive the build file itself and its checksum.
    checksum_file_path = file_path.parent / (file_path.name + ".sha256")

    for source_file_path in [file_path, checksum_file_path]:
        if not source_file_path.exists():
            continue

        archive_path = source_file_path.parent / "archive"
        os.makedirs(archive_path, exist_ok=True)
        dest_file_path = archive_path / source_file_path.name

        worker.utils.remove_file(dest_file_path, dry_run=dry_run)
        worker.utils.move(source_file_path, dest_file_path, dry_run=dry_run)


def fetch_current_builds(
    builder: ArchiveBuilder,
    pipeline_type: str,
    short_version: Optional[str] = None,
    all_platforms: bool = False,
) -> Dict[Any, List[Any]]:
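    """Collect current builds in the pipeline's download folder, grouped for deduplication.

    Builds are grouped by branch (or "daily"), file extension, architecture and
    platform, optionally filtered to one short version and to this builder's
    platform, architecture and build configuration."""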
    app_id = "bpy" if builder.python_module else "blender"

    worker_config = builder.get_worker_config()
    download_path = worker_config.buildbot_download_folder
    pipeline_build_path = download_path / pipeline_type

    print(f"Fetching current builds in [{pipeline_build_path}]")
    build_groups: Dict[Any, List[Any]] = {}
    for file_path in pipeline_build_path.glob("*.*"):
        if not file_path.is_file():
            continue
        if file_path.name.endswith(".sha256"):
            continue

        build_info = parse_build_info(file_path)
        if not build_info:
            continue
        if short_version and not build_info["version_id"].startswith(short_version + "."):
            continue

        if not all_platforms:
            if builder.architecture and build_info["architecture"] != builder.architecture:
                continue
            if builder.platform_id and build_info["platform_id"] != builder.platform_id:
                continue
            if (
                builder.build_configuration
                and build_info["build_configuration"] != builder.build_configuration
            ):
                continue

        # Daily builds form a single group per package variant; other
        # pipelines additionally group by branch.
        if pipeline_type == "daily":
            key = (
                "daily",
                build_info["file_extension"],
                build_info["architecture"],
                build_info["platform_id"],
            )
        else:
            key = (
                build_info["branch_id"],
                build_info["file_extension"],
                build_info["architecture"],
                build_info["platform_id"],
            )

        build_groups.setdefault(key, []).append(build_info)

    return build_groups


def archive_build_group(
    builds: Sequence[Dict], retention_in_days: int, dry_run: bool = True
) -> None:
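    """Archive every build in the group except the newest, and the newest too
    once it exceeds the retention period."""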
    builds = sorted(builds, key=lambda build: build["file_age_in_days"])

    for i, build in enumerate(builds):
        build_age = build["file_age_in_days"]
        build_name = build["file_path"].name

        # Only keep the most recent build if there are multiple.
        if i > 0 or build_age > retention_in_days:
            print(f"Archiving [{build_name}] (age: {build_age:.3f} days)")
            archive_build(build["file_path"], dry_run=dry_run)
        else:
            print(f"Keeping [{build_name}] (age: {build_age:.3f} days)")


def deduplicate(builder: ArchiveBuilder) -> None:
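    """Archive all but the most recent build in each group for this builder's
    platform, architecture and configuration (daily pipeline only)."""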
    retention_in_days = builder.retention_in_days
    dry_run = builder.dry_run

    # Get major.minor version to match.
    short_version = ""
    if builder.pipeline_type == "daily":
        branches_config = builder.get_branches_config()
        short_version = branches_config.track_major_minor_versions[builder.track_id]

    if not short_version:
        raise Exception(f"Missing version in [{builder.pipeline_type}] builds, aborting")

    build_groups = fetch_current_builds(builder, builder.pipeline_type, short_version=short_version)

    print(
        f"Deduplicating [{builder.pipeline_type}] builds for [{short_version}] "
        f"[{builder.build_configuration}] [{builder.platform_id}] [{builder.architecture}]"
    )
    for key, build_group in build_groups.items():
        print("")
        print("--- Group: " + str(key))
        archive_build_group(build_group, retention_in_days, dry_run=dry_run)


def fetch_purge_builds(
    builder: ArchiveBuilder, pipeline_type: str, folder: str
) -> Sequence[pathlib.Path]:
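    """List archived build files in the given pipeline subfolder, skipping checksum files."""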
    worker_config = builder.get_worker_config()
    download_path = worker_config.buildbot_download_folder
    archive_path = download_path / pipeline_type / folder
    os.makedirs(archive_path, exist_ok=True)

    print(f"Fetching archived builds in [{archive_path}]")
    builds = []
    for file_path in archive_path.glob("*.*"):
        if not file_path.is_file():
            continue
        if file_path.name.endswith(".sha256"):
            continue

        builds.append(file_path)

    return builds


def purge(builder: ArchiveBuilder) -> None:
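    """Deduplicate non-daily pipelines, then delete archived builds and test
    artifacts once they exceed their retention periods."""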
    builds_retention_in_days = builder.retention_in_days
    tests_retention_in_days = 10
    dry_run = builder.dry_run

    for pipeline_type in pipeline_types:
        # Daily builds are deduplicated separately, in the deduplicate step.
        if pipeline_type != "daily":
            print("=" * 120)
            print(f"Deduplicating [{pipeline_type}] builds")
            build_groups = fetch_current_builds(builder, pipeline_type, all_platforms=True)
            for key, build_group in build_groups.items():
                print("")
                print("--- Group: " + str(key))
                archive_build_group(build_group, builds_retention_in_days, dry_run=dry_run)

        print("=" * 120)
        print(f"Purging [{pipeline_type}] builds older than [{builds_retention_in_days}] days")
        for file_path in fetch_purge_builds(builder, pipeline_type, "archive"):
            age = file_age_in_days(file_path)
            if age < builds_retention_in_days:
                continue

            checksum_file_path = file_path.parent / (file_path.name + ".sha256")

            print(f"Deleting [{file_path.name}] (age: {age:.3f} days)")
            worker.utils.remove_file(file_path, dry_run=dry_run)
            worker.utils.remove_file(checksum_file_path, dry_run=dry_run)

        print("=" * 120)
        print(f"Purging [{pipeline_type}] tests older than [{tests_retention_in_days}] days")
        for file_path in fetch_purge_builds(builder, pipeline_type, "tests"):
            age = file_age_in_days(file_path)
            if age < tests_retention_in_days:
                continue

            checksum_file_path = file_path.parent / (file_path.name + ".sha256")

            print(f"Deleting [{file_path.name}] (age: {age:.3f} days)")
            worker.utils.remove_file(file_path, dry_run=dry_run)
            worker.utils.remove_file(checksum_file_path, dry_run=dry_run)


def generate_test_data(builder: ArchiveBuilder) -> None:
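    """Populate the download folder with dummy package files and checksums,
    with randomized names and modification times, for testing this script."""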
    worker_config = builder.get_worker_config()
    download_path = worker_config.buildbot_download_folder

    branches_config = builder.get_branches_config()
    short_version = branches_config.track_major_minor_versions[builder.track_id]

    app_id = "bpy" if builder.python_module else "blender"
    commit_hashes = ["1ddf858", "03a2a53"]
    risk_ids = ["stable", "alpha"]
    file_extensions = ["zip", "msi"]

    if builder.pipeline_type == "daily":
        versions = [short_version + ".0", short_version + ".1"]
        branches = ["main", "v50"]
        build_configurations = ["release"]
    elif builder.pipeline_type == "patch":
        versions = ["5.0.0", "7.0.0"]
        branches = ["PR123", "PR456", "PR789"]
        build_configurations = ["release", "debug"]
    else:
        versions = ["4.0.0", "6.0.0"]
        branches = ["realtime-compositor", "cycles-x"]
        build_configurations = ["release", "debug"]

    pipeline_path = download_path / builder.pipeline_type
    os.makedirs(pipeline_path, exist_ok=True)

    for _ in range(25):
        filename = (
            f"{app_id}-{random.choice(versions)}-{random.choice(risk_ids)}"
            f"+{random.choice(branches)}.{random.choice(commit_hashes)}"
            f"-{builder.platform_id}.{builder.architecture}"
            f"-{random.choice(build_configurations)}.{random.choice(file_extensions)}"
        )

        file_path = pipeline_path / filename
        file_path.write_text("Test")

        checksum_file_path = file_path.parent / (file_path.name + ".sha256")
        checksum_file_path.write_text("Test")

        # Randomize modification times up to a year back to exercise the retention logic.
        delta = datetime.timedelta(days=365 * random.random())
        filetime = time.mktime((datetime.datetime.today() - delta).timetuple())
        os.utime(file_path, (filetime, filetime))
        os.utime(checksum_file_path, (filetime, filetime))


if __name__ == "__main__":
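    # Typical invocation (illustrative; the positional step argument is
    # assumed to come from worker.utils.create_argument_parser):
    #   archive.py --pipeline-type=daily --dry-run deduplicate-binaries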
    steps: worker.utils.BuilderSteps = OrderedDict()
    steps["deduplicate-binaries"] = deduplicate
    steps["purge-binaries"] = purge

    parser = worker.utils.create_argument_parser(steps=steps)
    parser.add_argument(
        "--pipeline-type", default="daily", type=str, choices=pipeline_types, required=False
    )
    parser.add_argument("--platform-id", default="", type=str, choices=platforms, required=False)
    parser.add_argument(
        "--architecture", default="", type=str, choices=architectures, required=False
    )
    parser.add_argument(
        "--build-configuration",
        default="release",
        type=str,
        choices=build_configurations,
        required=False,
    )
    parser.add_argument("--retention-in-days", default=100, type=int, required=False)
    parser.add_argument("--python-module", action="store_true", required=False)
    parser.add_argument("--dry-run", action="store_true", required=False)
    parser.add_argument("--generate-test-data", action="store_true", required=False)

    args = parser.parse_args()
    builder = ArchiveBuilder(args)

    if args.generate_test_data:
        generate_test_data(builder)

    builder.run(args.step, steps)