Merge branch 'blender-v4.2-release'
commit 8caf042d0f
@@ -695,6 +695,11 @@ def pkg_manifest_archive_url_abs_from_remote_url(remote_url: str, archive_url: s
     return archive_url


+def pkg_manifest_dict_apply_build_generated_table(manifest_dict: Dict[str, Any]) -> None:
+    from .cli.blender_ext import pkg_manifest_dict_apply_build_generated_table as fn
+    fn(manifest_dict)
+
+
 def pkg_is_legacy_addon(filepath: str) -> bool:
     from .cli.blender_ext import pkg_is_legacy_addon as pkg_is_legacy_addon_extern
     result = pkg_is_legacy_addon_extern(filepath)
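Note: the wrapper added above simply re-exports the CLI-side helper. Its effect (defined in full in the blender_ext.py hunk further down) is to overwrite top-level manifest keys with the values recorded under the generated `[build.generated]` table. A minimal standalone sketch of that behaviour on a plain dict, for illustration only:

from typing import Any, Dict

def apply_build_generated_table(manifest_dict: Dict[str, Any]) -> None:
    # Mirrors `pkg_manifest_dict_apply_build_generated_table`:
    # values written by a `--split-platforms` build take precedence.
    build_generated = manifest_dict.get("build", {}).get("generated")
    if build_generated is None:
        return
    platforms = build_generated.get("platforms")
    if platforms is not None:
        manifest_dict["platforms"] = platforms

manifest = {
    "id": "my_test",
    "platforms": ["linux-x64", "windows-x64"],
    "build": {"generated": {"platforms": ["linux-x64"]}},
}
apply_build_generated_table(manifest)
assert manifest["platforms"] == ["linux-x64"]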
@@ -1233,6 +1238,10 @@ def repository_filter_packages(
         )) is None:
             continue

+        # No need to call: `pkg_manifest_dict_apply_build_generated_table(item_local)`
+        # Because these values will have been applied when generating the JSON.
+        assert "generated" not in item.get("build", {})
+
         if repository_filter_skip(item, filter_params, error_fn):
             continue

@@ -1498,6 +1507,9 @@ class _RepoDataSouce_TOML_FILES(_RepoDataSouce_ABC):
             )) is None:
                 continue

+            # Apply generated variables before filtering.
+            pkg_manifest_dict_apply_build_generated_table(item_local)
+
             if repository_filter_skip(item_local, self._filter_params, error_fn):
                 continue

@@ -114,6 +114,10 @@ TERSE_DESCRIPTION_MAX_LENGTH = 64
 def print(*args: Any, **kw: Dict[str, Any]) -> None:
     raise Exception("Illegal print(*({!r}), **{{{!r}}})".format(args, kw))

+# # Useful for testing.
+# def print(*args: Any, **kw: Dict[str, Any]):
+#     __builtins__.print(*args, **kw, file=open('/tmp/output.txt', 'a'))
+

 def debug_stack_trace_to_file() -> None:
     """
@@ -613,6 +617,8 @@ def pkg_manifest_from_zipfile_and_validate_impl(
             manifest_dict = toml_from_bytes(file_content)
             assert isinstance(manifest_dict, dict)

+            pkg_manifest_dict_apply_build_generated_table(manifest_dict)
+
     # TODO: forward actual error.
     if manifest_dict is None:
         return ["Archive does not contain a manifest"]
@@ -1620,26 +1626,164 @@ def pkg_manifest_is_valid_or_error_all(
     )


+# -----------------------------------------------------------------------------
+# Manifest Utilities
+
+def pkg_manifest_dict_apply_build_generated_table(manifest_dict: Dict[str, Any]) -> None:
+    # Swap in values from `[build.generated]` if it exists:
+    if (build_generated := manifest_dict.get("build", {}).get("generated")) is None:
+        return
+
+    if (platforms := build_generated.get("platforms")) is not None:
+        manifest_dict["platforms"] = platforms
+
+
 # -----------------------------------------------------------------------------
 # Standalone Utilities

+platform_system_replace = {
+    "darwin": "macos",
+}
+
+platform_machine_replace = {
+    "x86_64": "x64",
+    "amd64": "x64",
+    # Used on Linux for ARM64 (APPLE already uses `arm64`).
+    "aarch64": "arm64",
+    "aarch32": "arm32",
+}
+
+# Use when converting a Python `.whl` platform to a Blender `platform_from_this_system` platform.
+platform_system_replace_for_wheels = {
+    "macosx": "macos",
+    "manylinux": "linux",
+    "musllinux": "linux",
+    "win": "windows",
+}
+
+
 def platform_from_this_system() -> str:
     import platform
-    system_replace = {
-        "darwin": "macos",
-    }
-    machine_replace = {
-        "x86_64": "x64",
-        "amd64": "x64",
-    }
     system = platform.system().lower()
     machine = platform.machine().lower()
     return "{:s}-{:s}".format(
-        system_replace.get(system, system),
-        machine_replace.get(machine, machine),
+        platform_system_replace.get(system, system),
+        platform_machine_replace.get(machine, machine),
     )


+def blender_platform_from_wheel_platform(wheel_platform: str) -> str:
+    """
+    Convert a wheel to a Blender compatible platform: e.g.
+    - ``linux_x86_64`` -> ``linux-x64``.
+    - ``manylinux_2_28_x86_64`` -> ``linux-x64``.
+    - ``manylinux2014_aarch64`` -> ``linux-arm64``.
+    - ``win_amd64`` -> ``windows-x64``.
+    - ``macosx_11_0_arm64`` -> ``macos-arm64``.
+    - ``manylinux2014_x86_64`` -> ``linux-x64``.
+    """
+
+    i = wheel_platform.find("_")
+    if i == -1:
+        # WARNING: this should never or almost never happen.
+        # Return the result as we don't have a better alternative.
+        return wheel_platform
+
+    head = wheel_platform[:i]
+    tail = wheel_platform[i + 1:]
+
+    for wheel_src, blender_dst in platform_system_replace_for_wheels.items():
+        if head == wheel_src:
+            head = blender_dst
+            break
+        # Account for:
+        # `manylinux2014` -> `linux`.
+        # `win32` -> `windows`.
+        if head.startswith(wheel_src) and head[len(wheel_src):].isdigit():
+            head = blender_dst
+            break
+
+    for wheel_src, blender_dst in platform_machine_replace.items():
+        if (tail == wheel_src) or (tail.endswith("_" + wheel_src)):
+            # NOTE: in some cases this skips GLIBC versions.
+            tail = blender_dst
+            break
+    else:
+        # Avoid GLIBC or MACOS versions being included in the `machine` value.
+        # This works as long as all known machine values are added to `platform_machine_replace`
+        # (only `x86_64` at the moment).
+        tail = tail.rpartition("_")[2]
+
+    return "{:s}-{:s}".format(head, tail)
+
+
+def blender_platform_compatible_with_wheel_platform(platform: str, wheel_platform: str) -> bool:
+    assert platform
+    if wheel_platform == "any":
+        return True
+    platform_blender = blender_platform_from_wheel_platform(wheel_platform)
+    return platform == platform_blender
+
+
+def build_paths_filter_wheels_by_platform(
+        build_paths: List[Tuple[str, str]],
+        platform: str,
+) -> List[Tuple[str, str]]:
+    """
+    All paths are wheels with filenames that follow the wheel spec.
+    Return wheels which are compatible with the ``platform``.
+    """
+    build_paths_for_platform: List[Tuple[str, str]] = []
+
+    for item in build_paths:
+        # Both the absolute/relative path can be used to get the filename.
+        # Use the relative since it's likely to be shorter.
+        wheel_filename = os.path.splitext(os.path.basename(item[1]))[0]
+
+        wheel_filename_split = wheel_filename.split("-")
+        # This should be unreachable because the manifest has been validated, add assert.
+        assert len(wheel_filename_split) >= 5, "Internal error, manifest validation disallows this"
+
+        wheel_platform = wheel_filename_split[-1]
+
+        if blender_platform_compatible_with_wheel_platform(platform, wheel_platform):
+            build_paths_for_platform.append(item)
+
+    return build_paths_for_platform
+
+
+def build_paths_filter_by_platform(
+        build_paths: List[Tuple[str, str]],
+        wheel_range: Tuple[int, int],
+        platforms: Tuple[str, ...],
+) -> Generator[Tuple[List[Tuple[str, str]], str], None, None]:
+    if not platforms:
+        yield (build_paths, "")
+        return
+
+    if wheel_range[0] == wheel_range[1]:
+        # Not an error, but there is no reason to split the packages in this case,
+        # caller may warn about this although it's not an error.
+        for platform in platforms:
+            yield (build_paths, platform)
+        return
+
+    build_paths_head = build_paths[:wheel_range[0]]
+    build_paths_wheels = build_paths[wheel_range[0]:wheel_range[1]]
+    build_paths_tail = build_paths[wheel_range[1]:]
+
+    for platform in platforms:
+        wheels_for_platform = build_paths_filter_wheels_by_platform(build_paths_wheels, platform)
+        yield (
+            [
+                *build_paths_head,
+                *wheels_for_platform,
+                *build_paths_tail,
+            ],
+            platform,
+        )
+
+
 def repository_filter_skip(
         item: Dict[str, Any],
         *,
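For reference, the conversion above takes the platform tag of a wheel filename and normalizes both the OS and machine halves to Blender's platform strings. A short usage sketch, assuming the functions from the hunk above are in scope (e.g. inside blender_ext.py itself); the expected pairs are taken from the docstring:

# Expected conversions, copied from the docstring of `blender_platform_from_wheel_platform`.
for wheel_tag, blender_platform in (
    ("linux_x86_64", "linux-x64"),
    ("manylinux_2_28_x86_64", "linux-x64"),
    ("manylinux2014_aarch64", "linux-arm64"),
    ("win_amd64", "windows-x64"),
    ("macosx_11_0_arm64", "macos-arm64"),
):
    assert blender_platform_from_wheel_platform(wheel_tag) == blender_platform

# "any" wheels are compatible with every platform.
assert blender_platform_compatible_with_wheel_platform("linux-x64", "any")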
@@ -2107,6 +2251,25 @@ def arg_handle_str_as_package_names(value: str) -> Sequence[str]:
     return result


+# -----------------------------------------------------------------------------
+# Argument Handlers ("build" command)
+
+def generic_arg_built_split_platforms(subparse: argparse.ArgumentParser) -> None:
+    subparse.add_argument(
+        "--split-platforms",
+        dest="split_platforms",
+        action="store_true",
+        default=False,
+        help=(
+            "Build a separate package for each platform.\n"
+            "Adding the platform as a file name suffix (before the extension).\n"
+            "\n"
+            "This can be useful to reduce the upload size of packages that bundle large\n"
+            "platform-specific modules (``*.whl`` files)."
+        ),
+    )
+
+
 # -----------------------------------------------------------------------------
 # Generate Repository

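The new flag is consumed by `subcmd_author.build(...)` (see the hunks below): when enabled, one archive is written per entry in the manifest's `platforms` list, with the platform appended to the file name before the extension (dashes replaced by underscores). A hypothetical invocation sketch, assuming the CLI module is run directly with Python (the exact entry point is outside this diff); only "build", "--source-dir", "--output-filepath", "--verbose" and "--split-platforms" are taken from the diff, the paths are illustrative:

import subprocess

subprocess.run(
    [
        "python", "blender_ext.py", "build",
        "--source-dir", "./my_extension",
        "--output-filepath", "./dist/my_extension.zip",
        "--split-platforms",
        "--verbose",
    ],
    check=True,
)
# With `platforms = ["linux-x64", "windows-x64"]` declared in blender_manifest.toml,
# this would produce e.g. ./dist/my_extension-linux_x64.zip and
# ./dist/my_extension-windows_x64.zip instead of a single archive.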
@@ -2990,6 +3153,7 @@ class subcmd_author:
             pkg_source_dir: str,
             pkg_output_dir: str,
             pkg_output_filepath: str,
+            split_platforms: bool,
             verbose: bool,
     ) -> bool:
         if not os.path.isdir(pkg_source_dir):
@@ -3022,6 +3186,25 @@ class subcmd_author:
             message_error(msg_fn, "Error parsing TOML \"{:s}\" {:s}".format(pkg_manifest_filepath, error_msg))
             return False

+        if split_platforms:
+            # NOTE: while this could be made into a warning which disables `split_platforms`,
+            # this could result in further problems for automated tasks which operate on the output
+            # where they would expect a platform suffix on each archive. So consider this an error.
+            if not manifest.platforms:
+                message_error(
+                    msg_fn,
+                    "Error in arguments \"--split-platforms\" with a manifest that does not declare \"platforms\"",
+                )
+                return False
+
+        if (manifest_build_data := manifest_data.get("build")) is not None:
+            if "generated" in manifest_build_data:
+                message_error(
+                    msg_fn,
+                    "Error in TOML \"{:s}\" contains reserved value: [build.generated]".format(pkg_manifest_filepath),
+                )
+                return False
+
         # Always include wheels & manifest.
         build_paths_extra = (
             # Inclusion of the manifest is implicit.
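Source manifests must not ship their own `[build.generated]` table; it is reserved for the values injected at build time, which is exactly what the error path above rejects. A quick standard-library sketch of the condition being guarded against (the TOML text is illustrative, not a complete manifest):

import tomllib

manifest_text = """
id = "my_test"
[build]
paths = ["__init__.py"]
[build.generated]
platforms = ["linux-x64"]
"""

manifest_data = tomllib.loads(manifest_text)
manifest_build_data = manifest_data.get("build")
# This is the case reported above as:
# 'Error in TOML ... contains reserved value: [build.generated]'
assert manifest_build_data is not None and "generated" in manifest_build_data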
@@ -3029,8 +3212,9 @@ class subcmd_author:
             PKG_MANIFEST_FILENAME_TOML,
             *(manifest.wheels or ()),
         )
+        build_paths_wheel_range = 1, 1 + len(manifest.wheels or ())

-        if (manifest_build_data := manifest_data.get("build")) is not None:
+        if manifest_build_data is not None:
             manifest_build_test = PkgManifest_Build.from_dict_all_errors(
                 manifest_build_data,
                 extra_paths=build_paths_extra,
@@ -3128,53 +3312,101 @@ class subcmd_author:
             message_status(msg_fn, "Error building path list \"{:s}\"".format(str(ex)))
             return False

-        if pkg_output_filepath != "":
-            # The directory may be empty, that is fine as join handles this correctly.
-            pkg_dirpath, pkg_filename = os.path.split(pkg_output_filepath)
-            outfile = pkg_output_filepath
-            outfile_temp = os.path.join(pkg_dirpath, "." + pkg_filename)
-            del pkg_dirpath
-        else:
-            pkg_filename = "{:s}-{:s}{:s}".format(manifest.id, manifest.version, PKG_EXT)
-            outfile = os.path.join(pkg_output_dir, pkg_filename)
-            outfile_temp = os.path.join(pkg_output_dir, "." + pkg_filename)
-
         request_exit = False

-        request_exit |= message_status(msg_fn, "Building {:s}".format(pkg_filename))
-        if request_exit:
-            return False
+        # A pass-through when there are no platforms to split.
+        for build_paths_for_platform, platform in build_paths_filter_by_platform(
+                build_paths,
+                build_paths_wheel_range,
+                tuple(manifest.platforms) if (split_platforms and manifest.platforms) else (),
+        ):
+            if pkg_output_filepath != "":
+                # The directory may be empty, that is fine as join handles this correctly.
+                pkg_dirpath, pkg_filename = os.path.split(pkg_output_filepath)
+
+                if platform:
+                    pkg_filename, pkg_filename_ext = os.path.splitext(pkg_filename)
+                    pkg_filename = "{:s}-{:s}{:s}".format(
+                        pkg_filename,
+                        platform.replace("-", "_"),
+                        pkg_filename_ext,
+                    )
+                    del pkg_filename_ext
+                    outfile = os.path.join(pkg_dirpath, pkg_filename)
+                else:
+                    outfile = pkg_output_filepath
+
+                outfile_temp = os.path.join(pkg_dirpath, "." + pkg_filename)
+                del pkg_dirpath
+            else:
+                if platform:
+                    pkg_filename = "{:s}-{:s}-{:s}{:s}".format(
+                        manifest.id,
+                        manifest.version,
+                        platform.replace("-", "_"),
+                        PKG_EXT,
+                    )
+                else:
+                    pkg_filename = "{:s}-{:s}{:s}".format(
+                        manifest.id,
+                        manifest.version,
+                        PKG_EXT,
+                    )
+                outfile = os.path.join(pkg_output_dir, pkg_filename)
+                outfile_temp = os.path.join(pkg_output_dir, "." + pkg_filename)
+
+            request_exit |= message_status(msg_fn, "building: {:s}".format(pkg_filename))
+            if request_exit:
+                return False

-        with CleanupPathsContext(files=(outfile_temp,), directories=()):
-            try:
-                zip_fh_context = zipfile.ZipFile(outfile_temp, 'w', zipfile.ZIP_DEFLATED, compresslevel=9)
-            except Exception as ex:
-                message_status(msg_fn, "Error creating archive \"{:s}\"".format(str(ex)))
-                return False
+            with CleanupPathsContext(files=(outfile_temp,), directories=()):
+                try:
+                    zip_fh_context = zipfile.ZipFile(outfile_temp, 'w', zipfile.ZIP_DEFLATED, compresslevel=9)
+                except Exception as ex:
+                    message_status(msg_fn, "Error creating archive \"{:s}\"".format(str(ex)))
+                    return False

-            with contextlib.closing(zip_fh_context) as zip_fh:
-                for filepath_abs, filepath_rel in build_paths:
-                    # Handy for testing that sub-directories:
-                    # zip_fh.write(filepath_abs, manifest.id + "/" + filepath_rel)
-                    compress_type = zipfile.ZIP_STORED if filepath_skip_compress(filepath_abs) else None
-                    try:
-                        zip_fh.write(filepath_abs, filepath_rel, compress_type=compress_type)
-                    except Exception as ex:
-                        message_status(msg_fn, "Error adding to archive \"{:s}\"".format(str(ex)))
-                        return False
+                with contextlib.closing(zip_fh_context) as zip_fh:
+                    for filepath_abs, filepath_rel in build_paths_for_platform:
+
+                        zip_data_override: Optional[bytes] = None
+                        if platform and (filepath_rel == PKG_MANIFEST_FILENAME_TOML):
+                            with open(filepath_abs, "rb") as temp_fh:
+                                zip_data_override = temp_fh.read()
+                                zip_data_override = zip_data_override + b"".join((
+                                    b"\n",
+                                    b"\n",
+                                    b"# BEGIN GENERATED CONTENT.\n",
+                                    b"# This must not be included in source manifests.\n",
+                                    b"[build.generated]\n",
+                                    "platforms = [\"{:s}\"]\n".format(platform).encode("utf-8"),
+                                    b"# END GENERATED CONTENT.\n",
+                                ))
+
+                        # Handy for testing that sub-directories:
+                        # zip_fh.write(filepath_abs, manifest.id + "/" + filepath_rel)
+                        compress_type = zipfile.ZIP_STORED if filepath_skip_compress(filepath_abs) else None
+                        try:
+                            if zip_data_override is not None:
+                                zip_fh.writestr(filepath_rel, zip_data_override, compress_type=compress_type)
+                            else:
+                                zip_fh.write(filepath_abs, filepath_rel, compress_type=compress_type)
+                        except Exception as ex:
+                            message_status(msg_fn, "Error adding to archive \"{:s}\"".format(str(ex)))
+                            return False

-                    if verbose:
-                        message_status(msg_fn, "add: {:s}".format(filepath_rel))
+                        if verbose:
+                            message_status(msg_fn, "add: {:s}".format(filepath_rel))

-            request_exit |= message_status(msg_fn, "complete")
-            if request_exit:
-                return False
+                request_exit |= message_status(msg_fn, "complete")
+                if request_exit:
+                    return False

-            if os.path.exists(outfile):
-                os.unlink(outfile)
-            os.rename(outfile_temp, outfile)
+                if os.path.exists(outfile):
+                    os.unlink(outfile)
+                os.rename(outfile_temp, outfile)

-            message_status(msg_fn, "created \"{:s}\", {:d}".format(outfile, os.path.getsize(outfile)))
+                message_status(msg_fn, "created: \"{:s}\", {:d}".format(outfile, os.path.getsize(outfile)))
         return True

     @staticmethod
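The block appended to the copy of blender_manifest.toml stored in each split archive is plain TOML, so it round-trips with tomllib. A small sketch of what ends up in an archive built for "linux-x64" and how it can be read back (as the test helper added further down does); file names are illustrative:

import tomllib
import zipfile

# The bytes the build loop above appends to the archived manifest.
generated = (
    b"\n"
    b"\n"
    b"# BEGIN GENERATED CONTENT.\n"
    b"# This must not be included in source manifests.\n"
    b"[build.generated]\n"
    b"platforms = [\"linux-x64\"]\n"
    b"# END GENERATED CONTENT.\n"
)

def manifest_platforms_from_archive(filepath: str) -> list:
    # Read the manifest back out of the built package, as the tests do.
    with zipfile.ZipFile(filepath, mode="r") as zip_fh:
        manifest_data = zip_fh.read("blender_manifest.toml")
    manifest_dict = tomllib.loads(manifest_data.decode("utf-8"))
    return manifest_dict["build"]["generated"]["platforms"]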
@@ -3375,6 +3607,7 @@ def unregister():
             pkg_source_dir=pkg_src_dir,
             pkg_output_dir=repo_dir,
             pkg_output_filepath="",
+            split_platforms=False,
             verbose=False,
     ):
         # Error running command.
@@ -3613,6 +3846,7 @@ def argparse_create_author_build(
     generic_arg_package_source_dir(subparse)
     generic_arg_package_output_dir(subparse)
     generic_arg_package_output_filepath(subparse)
+    generic_arg_built_split_platforms(subparse)
     generic_arg_verbose(subparse)

     if args_internal:
@@ -3624,6 +3858,7 @@ def argparse_create_author_build(
             pkg_source_dir=args.source_dir,
             pkg_output_dir=args.output_dir,
             pkg_output_filepath=args.output_filepath,
+            split_platforms=args.split_platforms,
             verbose=args.verbose,
         ),
     )
@@ -13,7 +13,9 @@ import shutil
 import subprocess
 import sys
 import tempfile
+import tomllib
 import unittest
+import zipfile

 import unittest.util

@@ -21,12 +23,19 @@ from typing import (
     Any,
     Sequence,
     Dict,
+    List,
     NamedTuple,
     Optional,
     Set,
     Tuple,
+    Union,
 )

+# A tree of files.
+FileTree = Dict[str, Union["FileTree", bytes]]
+
+JSON_OutputElem = Tuple[str, Any]
+
 # For more useful output that isn't clipped.
 # pylint: disable-next=protected-access
 unittest.util._MAX_LENGTH = 10_000
@@ -116,16 +125,40 @@ def rmdir_contents(directory: str) -> None:
         os.unlink(filepath)


-# -----------------------------------------------------------------------------
-# HTTP Server (simulate remote access)
-#
+def manifest_dict_from_archive(filepath: str) -> Dict[str, Any]:
+    with zipfile.ZipFile(filepath, mode="r") as zip_fh:
+        manifest_data = zip_fh.read(PKG_MANIFEST_FILENAME_TOML)
+    manifest_dict = tomllib.loads(manifest_data.decode("utf-8"))
+    return manifest_dict


 # -----------------------------------------------------------------------------
 # Generate Repository
 #


-def my_create_package(dirpath: str, filename: str, *, metadata: Dict[str, Any], files: Dict[str, bytes]) -> None:
+def files_create_in_dir(basedir: str, files: FileTree) -> None:
+    if not os.path.isdir(basedir):
+        os.makedirs(basedir)
+    for filename_iter, data in files.items():
+        path = os.path.join(basedir, filename_iter)
+        if isinstance(data, bytes):
+            with open(path, "wb") as fh:
+                fh.write(data)
+        elif isinstance(data, dict):
+            files_create_in_dir(path, data)
+        else:
+            assert False, "Unreachable"
+
+
+def my_create_package(
+        dirpath: str,
+        filename: str,
+        *,
+        metadata: Dict[str, Any],
+        files: FileTree,
+        build_args_extra: Tuple[str, ...],
+) -> Sequence[JSON_OutputElem]:
     """
     Create a package using the command line interface.
     """
@@ -139,7 +172,7 @@ def my_create_package(dirpath: str, filename: str, *, metadata: Dict[str, Any],
     temp_dir_pkg_manifest_toml = os.path.join(temp_dir_pkg, PKG_MANIFEST_FILENAME_TOML)
     with open(temp_dir_pkg_manifest_toml, "wb") as fh:
         # NOTE: escaping is not supported, this is primitive TOML writing for tests.
-        data = "".join((
+        data_list = [
             """# Example\n""",
             """schema_version = "{:s}"\n""".format(metadata_copy.pop("schema_version")),
             """id = "{:s}"\n""".format(metadata_copy.pop("id")),
@@ -151,21 +184,27 @@ def my_create_package(dirpath: str, filename: str, *, metadata: Dict[str, Any],
             """blender_version_min = "{:s}"\n""".format(metadata_copy.pop("blender_version_min")),
             """maintainer = "{:s}"\n""".format(metadata_copy.pop("maintainer")),
             """license = [{:s}]\n""".format(", ".join("\"{:s}\"".format(v) for v in metadata_copy.pop("license"))),
-        )).encode('utf-8')
-        fh.write(data)
+        ]
+
+        if (value := metadata_copy.pop("platforms", None)) is not None:
+            data_list.append("""platforms = [{:s}]\n""".format(", ".join("\"{:s}\"".format(v) for v in value)))
+
+        if (value := metadata_copy.pop("wheels", None)) is not None:
+            data_list.append("""wheels = [{:s}]\n""".format(", ".join("\"{:s}\"".format(v) for v in value)))
+
+        fh.write("".join(data_list).encode('utf-8'))

     if metadata_copy:
         raise Exception("Unexpected mata-data: {!r}".format(metadata_copy))

-    for filename_iter, data in files.items():
-        with open(os.path.join(temp_dir_pkg, filename_iter), "wb") as fh:
-            fh.write(data)
+    files_create_in_dir(temp_dir_pkg, files)

     output_json = command_output_from_json_0(
         [
             "build",
             "--source-dir", temp_dir_pkg,
             "--output-filepath", outfile,
+            *build_args_extra,
         ],
         exclude_types={"PROGRESS"},
     )
@@ -178,6 +217,8 @@ def my_create_package(dirpath: str, filename: str, *, metadata: Dict[str, Any],
     if output_json_error:
         raise Exception("Creating a package produced some error output: {!r}".format(output_json_error))

+    return output_json
+

 class PkgTemplate(NamedTuple):
     """Data need to create a package for testing."""
@@ -209,20 +250,21 @@ def my_generate_repo(
         files={
             "__init__.py": b"# This is a script\n",
         },
+        build_args_extra=(),
     )


 def command_output_filter_include(
-        output_json: Sequence[Tuple[str, Any]],
+        output_json: Sequence[JSON_OutputElem],
         include_types: Set[str],
-) -> Sequence[Tuple[str, Any]]:
+) -> Sequence[JSON_OutputElem]:
     return [(a, b) for a, b in output_json if a in include_types]


 def command_output_filter_exclude(
-        output_json: Sequence[Tuple[str, Any]],
+        output_json: Sequence[JSON_OutputElem],
         exclude_types: Set[str],
-) -> Sequence[Tuple[str, Any]]:
+) -> Sequence[JSON_OutputElem]:
     return [(a, b) for a, b in output_json if a not in exclude_types]


@@ -248,7 +290,7 @@ def command_output_from_json_0(
         *,
         exclude_types: Optional[Set[str]] = None,
         expected_returncode: int = 0,
-) -> Sequence[Tuple[str, Any]]:
+) -> Sequence[JSON_OutputElem]:
     result = []

     proc = subprocess.run(
@@ -278,6 +320,115 @@ class TestCLI(unittest.TestCase):
         self.assertEqual(command_output(["--version"]), "0.1\n")


+class TestCLI_Build(unittest.TestCase):
+
+    dirpath = ""
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.dirpath = TEMP_DIR_LOCAL
+        if os.path.isdir(cls.dirpath):
+            rmdir_contents(TEMP_DIR_LOCAL)
+        else:
+            os.makedirs(TEMP_DIR_LOCAL)
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        if os.path.isdir(cls.dirpath):
+            rmdir_contents(TEMP_DIR_LOCAL)
+
+    def test_build_multi_platform(self) -> None:
+        platforms = [
+            "linux-arm64",
+            "linux-x64",
+            "macos-arm64",
+            "macos-x64",
+            "windows-arm64",
+            "windows-x64",
+        ]
+        wheels = [
+            # Must be included in all packages.
+            "my_portable_package-3.0.1-py3-none-any.whl",
+            # Each package must include only one.
+            "my_platform_package-10.3.0-cp311-cp311-macosx_11_0_arm64.whl",
+            "my_platform_package-10.3.0-cp311-cp311-macosx_11_0_x86_64.whl",
+            "my_platform_package-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl",
+            "my_platform_package-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl",
+            "my_platform_package-10.3.0-cp311-cp311-win_amd64.whl",
+            "my_platform_package-10.3.0-cp311-cp311-win_arm64.whl",
+        ]
+
+        pkg_idname = "my_test"
+        output_json = my_create_package(
+            self.dirpath,
+            pkg_idname + PKG_EXT,
+            metadata={
+                "schema_version": "1.0.0",
+                "id": "multi_platform_test",
+                "name": "Multi Platform Test",
+                "tagline": """This package has a tagline""",
+                "version": "1.0.0",
+                "type": "add-on",
+                "tags": ["UV", "Modeling"],
+                "blender_version_min": "0.0.0",
+                "maintainer": "Some Developer",
+                "license": ["SPDX:GPL-2.0-or-later"],
+                "platforms": platforms,
+                "wheels": ["./wheels/" + filename for filename in wheels]
+            },
+            files={
+                "__init__.py": b"# This is a script\n",
+                "wheels": {filename: b"" for filename in wheels},
+            },
+            build_args_extra=(
+                # Include `add: {...}` so the file list can be scanned.
+                "--verbose",
+                "--split-platforms",
+            ),
+        )
+
+        output_json = command_output_filter_include(
+            output_json,
+            include_types={'STATUS'},
+        )
+
+        packages: List[Tuple[str, List[JSON_OutputElem]]] = [("", [])]
+        for _, message in output_json:
+            if message.startswith("building: "):
+                assert not packages[-1][0]
+                assert not packages[-1][1]
+                packages[-1] = (message.removeprefix("building: "), [])
+            elif message.startswith("add: "):
+                packages[-1][1].append(message.removeprefix("add: "))
+            elif message.startswith("created: "):
+                pass
+            elif message == "complete":
+                packages.append(("", []))
+            else:
+                raise Exception("Unexpected status: {:s}".format(message))
+
+        packages_dict = dict(packages)
+        for platform in platforms:
+            filename = "{:s}-{:s}{:s}".format(pkg_idname, platform.replace("-", "_"), PKG_EXT)
+            value = packages_dict.get(filename)
+            assert isinstance(value, list)
+            # A check here that gives a better error would be nice, for now, check there are always 4 files.
+            self.assertEqual(len(value), 4)
+
+            manifest_dict = manifest_dict_from_archive(os.path.join(self.dirpath, filename))
+
+            # Ensure the generated data is included:
+            # `[build.generated]`
+            # `platforms = [{platform}]`
+            build_value = manifest_dict.get("build")
+            assert build_value is not None
+            build_generated_value = build_value.get("generated")
+            assert build_generated_value is not None
+            build_generated_platforms_value = build_generated_value.get("platforms")
+            assert build_generated_platforms_value is not None
+            self.assertEqual(build_generated_platforms_value, [platform])
+
+
 class TestCLI_WithRepo(unittest.TestCase):
     dirpath = ""
     dirpath_url = ""
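The test above reconstructs the per-archive file lists purely from the STATUS stream, which for each platform follows the pattern "building: <name>", one "add: <path>" per file, a "created: ..." line and finally "complete". A minimal sketch of that grouping over a hypothetical message list (file names, paths and the size value are invented for illustration):

# Hypothetical STATUS messages for one platform, in the order the build emits them.
messages = [
    "building: my_test-linux_x64.zip",
    "add: blender_manifest.toml",
    "add: __init__.py",
    "add: wheels/my_portable_package-3.0.1-py3-none-any.whl",
    "created: \"/tmp/my_test-linux_x64.zip\", 1024",
    "complete",
]

packages = {}
current_name, current_files = "", []
for message in messages:
    if message.startswith("building: "):
        current_name, current_files = message.removeprefix("building: "), []
    elif message.startswith("add: "):
        current_files.append(message.removeprefix("add: "))
    elif message == "complete":
        packages[current_name] = current_files

assert packages["my_test-linux_x64.zip"][0] == "blender_manifest.toml"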
@@ -439,6 +590,9 @@ class TestCLI_WithRepo(unittest.TestCase):

 if __name__ == "__main__":
     if USE_HTTP:
+        # This doesn't take advantage of a HTTP client/server.
+        del TestCLI_Build
+
         with HTTPServerContext(directory=TEMP_DIR_REMOTE, port=HTTP_PORT):
             unittest.main()
     else:
@@ -36,6 +36,8 @@ from typing import (
 # pylint: disable-next=protected-access
 unittest.util._MAX_LENGTH = 10_000

+PKG_EXT = ".zip"
+
 PKG_MANIFEST_FILENAME_TOML = "blender_manifest.toml"

 VERBOSE_CMD = False
@@ -401,7 +403,7 @@ class TestWithTempBlenderUser_MixIn(unittest.TestCase):
     ) -> None:
         if pkg_filename is None:
             pkg_filename = pkg_idname
-        pkg_output_filepath = os.path.join(TEMP_DIR_REMOTE, pkg_filename + ".zip")
+        pkg_output_filepath = os.path.join(TEMP_DIR_REMOTE, pkg_filename + PKG_EXT)
         with tempfile.TemporaryDirectory() as package_build_dir:
             create_package(
                 package_build_dir,
@@ -422,10 +424,10 @@ class TestWithTempBlenderUser_MixIn(unittest.TestCase):
         self.assertEqual(
             stdout,
             (
-                "Building {:s}.zip\n"
+                "building: {:s}{:s}\n"
                 "complete\n"
-                "created \"{:s}\", {:d}\n"
-            ).format(pkg_filename, pkg_output_filepath, os.path.getsize(pkg_output_filepath)),
+                "created: \"{:s}\", {:d}\n"
+            ).format(pkg_filename, PKG_EXT, pkg_output_filepath, os.path.getsize(pkg_output_filepath)),
         )
