Extensions: add missing checks for errors in IO, JSON & TOML data
Exceptions need to be caught and forwarded so they show up in Blender's interface. While the most common errors were accounted for, various IO errors and malformed JSON/TOML could cause internal operations to fail with unhandled exceptions.
parent 11b1919bcf
commit 5877efbec3
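The diff below applies one convention throughout: fallible helpers return their result on success or an error string on failure, and callers unpack the call with a walrus assignment plus an isinstance() check, forwarding the string to the interface instead of letting the exception escape. A minimal standalone sketch of that pattern, reusing toml_from_bytes_or_error from this commit (the demo payload and print() calls are illustrative only; the real code forwards errors through error_fn callbacks or msglog):

import tomllib  # Python 3.11+.
from typing import Any, Dict, Union


def toml_from_bytes_or_error(data: bytes) -> Union[Dict[str, Any], str]:
    # Return the parsed dict on success, the error text on failure.
    try:
        result = tomllib.loads(data.decode('utf-8'))
        assert isinstance(result, dict)
        return result
    except Exception as ex:
        return str(ex)


# Caller side: the walrus assignment keeps the call and the check together.
if isinstance((manifest := toml_from_bytes_or_error(b'id = "demo"')), str):
    print("error:", manifest)   # Forwarded via error_fn()/msglog in the real code.
else:
    print("parsed:", manifest)  # -> parsed: {'id': 'demo'}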
@@ -1475,21 +1475,21 @@ class _RepoDataSouce_JSON(_RepoDataSouce_ABC):
             data_dict = json_from_filepath(self._filepath) or {}
         except Exception as ex:
             error_fn(ex)
-
-        # This is *not* a full validation,
-        # just skip malformed JSON files as they're likely to cause issues later on.
-        if not isinstance(data_dict, dict):
-            error_fn(Exception("Remote repository data from {:s} must be a dict not a {:s}".format(
-                self._filepath,
-                str(type(data_dict)),
-            )))
-            data_dict = {}
-
-        if not isinstance(data_dict.get("data"), list):
-            error_fn(Exception("Remote repository data from {:s} must contain a \"data\" list".format(
-                self._filepath,
-            )))
-            data_dict = {}
+        else:
+            # This is *not* a full validation,
+            # just skip malformed JSON files as they're likely to cause issues later on.
+            if not isinstance(data_dict, dict):
+                error_fn(Exception("Remote repository data from {:s} must be a dict not a {:s}".format(
+                    self._filepath,
+                    str(type(data_dict)),
+                )))
+                data_dict = {}
+
+            if not isinstance(data_dict.get("data"), list):
+                error_fn(Exception("Remote repository data from {:s} must contain a \"data\" list".format(
+                    self._filepath,
+                )))
+                data_dict = {}
 
         # It's important to assign this value even if it's "empty",
         # otherwise corrupt files will be detected as unset and continuously attempt to load.
@@ -2036,7 +2036,12 @@ class RepoLock:
 
             # This most likely exists, create if it doesn't.
             if not os.path.isdir(local_private_dir):
-                os.makedirs(local_private_dir)
+                try:
+                    os.makedirs(local_private_dir)
+                except Exception as ex:
+                    # Likely no permissions or read-only file-system.
+                    result[directory] = "Lock directory could not be created: {:s}".format(str(ex))
+                    continue
 
             local_lock_file = os.path.join(local_private_dir, REPO_LOCAL_PRIVATE_LOCK)
             # Attempt to get the lock, kick out stale locks.
@@ -436,16 +436,29 @@ def random_acii_lines(*, seed: Union[int, str], width: int) -> Generator[str, None, None]:
     yield "".join(chars_list[:width])
 
 
-def sha256_from_file(filepath: str, block_size: int = 1 << 20, hash_prefix: bool = False) -> Tuple[int, str]:
+def sha256_from_file_or_error(
+        filepath: str,
+        block_size: int = 1 << 20,
+        hash_prefix: bool = False,
+) -> Union[Tuple[int, str], str]:
     """
     Returns an arbitrary sized unique ASCII string based on the file contents.
     (exact hashing method may change).
     """
-    with open(filepath, 'rb') as fh:
+    try:
+        fh_context = open(filepath, 'rb')
+    except Exception as ex:
+        return "error opening file: {:s}".format(str(ex))
+
+    with contextlib.closing(fh_context) as fh:
         size = 0
         sha256 = hashlib.new('sha256')
         while True:
-            data = fh.read(block_size)
+            try:
+                data = fh.read(block_size)
+            except Exception as ex:
+                return "error reading file: {:s}".format(str(ex))
+
             if not data:
                 break
             sha256.update(data)
@@ -759,14 +772,12 @@ def pkg_manifest_from_zipfile_and_validate_impl(
     if file_content is None:
         return ["Archive does not contain a manifest"]
 
-    manifest_dict = toml_from_bytes(file_content)
-    assert isinstance(manifest_dict, dict)
+    if isinstance((manifest_dict := toml_from_bytes_or_error(file_content)), str):
+        return ["Archive contains a manifest that could not be parsed {:s}".format(manifest_dict)]
 
+    assert isinstance(manifest_dict, dict)
     pkg_manifest_dict_apply_build_generated_table(manifest_dict)
 
-    # TODO: forward actual error.
-    if manifest_dict is None:
-        return ["Archive does not contain a manifest"]
     return pkg_manifest_from_dict_and_validate_impl(
         manifest_dict,
         from_repo=False,
@@ -2311,17 +2322,23 @@ def pkg_manifest_detect_duplicates(pkg_idname: str, pkg_items: List[PkgManifest]
     return None
 
 
-def toml_from_bytes(data: bytes) -> Optional[Dict[str, Any]]:
-    result = tomllib.loads(data.decode('utf-8'))
-    assert isinstance(result, dict)
-    return result
+def toml_from_bytes_or_error(data: bytes) -> Union[Dict[str, Any], str]:
+    try:
+        result = tomllib.loads(data.decode('utf-8'))
+        assert isinstance(result, dict)
+        return result
+    except Exception as ex:
+        return str(ex)
 
 
-def toml_from_filepath(filepath: str) -> Optional[Dict[str, Any]]:
-    with open(filepath, "rb") as fh:
-        data = fh.read()
-    result = toml_from_bytes(data)
-    return result
+def toml_from_filepath_or_error(filepath: str) -> Union[Dict[str, Any], str]:
+    try:
+        with open(filepath, "rb") as fh:
+            data = fh.read()
+        result = toml_from_bytes_or_error(data)
+        return result
+    except Exception as ex:
+        return str(ex)
 
 
 def repo_local_private_dir(*, local_dir: str) -> str:
@@ -2331,25 +2348,48 @@ def repo_local_private_dir(*, local_dir: str) -> str:
     return os.path.join(local_dir, REPO_LOCAL_PRIVATE_DIR)
 
 
-def repo_local_private_dir_ensure(*, local_dir: str) -> str:
+def repo_local_private_dir_ensure(
+        *,
+        local_dir: str,
+        error_fn: Callable[[Exception], None],
+) -> Optional[str]:
     """
     Ensure the repos hidden directory exists.
     """
     local_private_dir = repo_local_private_dir(local_dir=local_dir)
     if not os.path.isdir(local_private_dir):
         # Unlikely but possible `local_dir` is missing.
-        os.makedirs(local_private_dir)
+        try:
+            os.makedirs(local_private_dir)
+        except Exception as ex:
+            error_fn(ex)
+            return None
     return local_private_dir
 
 
-def repo_local_private_dir_ensure_with_subdir(*, local_dir: str, subdir: str) -> str:
+def repo_local_private_dir_ensure_with_subdir(
+        *,
+        local_dir: str,
+        subdir: str,
+        error_fn: Callable[[Exception], None],
+) -> Optional[str]:
    """
    Return a local directory used to cache package downloads.
    """
-    local_private_subdir = os.path.join(repo_local_private_dir_ensure(local_dir=local_dir), subdir)
+    if (local_private_dir := repo_local_private_dir_ensure(
+            local_dir=local_dir,
+            error_fn=error_fn,
+    )) is None:
+        return None
+
+    local_private_subdir = os.path.join(local_private_dir, subdir)
     if not os.path.isdir(local_private_subdir):
         # Unlikely but possible `local_dir` is missing.
-        os.makedirs(local_private_subdir)
+        try:
+            os.makedirs(local_private_subdir)
+        except Exception as ex:
+            error_fn(ex)
+            return None
     return local_private_subdir
 
 
@@ -2379,9 +2419,16 @@ def repo_sync_from_remote(
     if request_exit:
         return False
 
+    if (local_private_dir := repo_local_private_dir_ensure(
+            local_dir=local_dir,
+            error_fn=lambda ex: any_as_none(
+                msglog.fatal_error("Error creating private directory: {:s}".format(str(ex)))
+            ),
+    )) is None:
+        return False
+
     remote_json_url = remote_url_get(remote_url)
 
-    local_private_dir = repo_local_private_dir_ensure(local_dir=local_dir)
     local_json_path = os.path.join(local_private_dir, PKG_REPO_LIST_FILENAME)
     local_json_path_temp = local_json_path + "@"
 
@@ -2451,36 +2498,48 @@ def repo_sync_from_remote(
     return True
 
 
-def repo_pkginfo_from_local_as_dict(*, local_dir: str) -> Optional[Dict[str, Any]]:
+def repo_pkginfo_from_local_as_dict_or_error(*, local_dir: str) -> Union[Dict[str, Any], str]:
     """
     Load package cache.
     """
     local_private_dir = repo_local_private_dir(local_dir=local_dir)
     local_json_path = os.path.join(local_private_dir, PKG_REPO_LIST_FILENAME)
-    if not os.path.exists(local_json_path):
-        return None
 
-    with open(local_json_path, "r", encoding="utf-8") as fh:
-        result = json.load(fh)
-    assert isinstance(result, dict)
+    # Don't check if the path exists, allow this to raise an exception.
+    try:
+        with open(local_json_path, "r", encoding="utf-8") as fh:
+            result = json.load(fh)
+    except Exception as ex:
+        return str(ex)
+
+    if not isinstance(result, dict):
+        return "expected a dict, not a {:s}".format(str(type(result)))
+
     return result
 
 
-def pkg_repo_dat_from_json(json_data: Dict[str, Any]) -> PkgRepoData:
+def pkg_repo_data_from_json_or_error(json_data: Dict[str, Any]) -> Union[PkgRepoData, str]:
+    if not isinstance((version := json_data.get("version", "v1")), str):
+        return "expected \"version\" to be a string"
+
+    if not isinstance((blocklist := json_data.get("blocklist", [])), list):
+        return "expected \"blocklist\" to be a list"
+
+    if not isinstance((data := json_data.get("data", [])), list):
+        return "expected \"data\" to be a list"
+
     result_new = PkgRepoData(
-        version=json_data.get("version", "v1"),
-        blocklist=json_data.get("blocklist", []),
-        data=json_data.get("data", []),
+        version=version,
+        blocklist=blocklist,
+        data=data,
     )
     return result_new
 
 
-def repo_pkginfo_from_local(*, local_dir: str) -> Optional[PkgRepoData]:
-    result = repo_pkginfo_from_local_as_dict(local_dir=local_dir)
-    if result is None:
-        return None
-    return pkg_repo_dat_from_json(result)
+def repo_pkginfo_from_local_or_none(*, local_dir: str) -> Union[PkgRepoData, str]:
+    if isinstance((result := repo_pkginfo_from_local_as_dict_or_error(local_dir=local_dir)), str):
+        return result
+    return pkg_repo_data_from_json_or_error(result)
 
 
 def url_has_known_prefix(path: str) -> bool:
@@ -3028,8 +3087,13 @@ class subcmd_server:
             return False
         del template
 
-        with open(filepath_repo_html, "w", encoding="utf-8") as fh_html:
-            fh_html.write(result)
+        try:
+            with open(filepath_repo_html, "w", encoding="utf-8") as fh_html:
+                fh_html.write(result)
+        except Exception as ex:
+            msglog.fatal_error("HTML failed to write: {:s}".format(str(ex)))
+            return False
 
         return True
 
     @staticmethod
@@ -3100,10 +3164,11 @@ class subcmd_server:
             manifest_dict["archive_url"] = "./" + urllib.request.pathname2url(filename)
 
             # Add archive variables, see: `PkgManifest_Archive`.
-            (
-                manifest_dict["archive_size"],
-                manifest_dict["archive_hash"],
-            ) = sha256_from_file(filepath, hash_prefix=True)
+            if isinstance((result := sha256_from_file_or_error(filepath, hash_prefix=True)), str):
+                msglog.error("unable to calculate hash ({:s}): {:s}".format(result, filepath))
+                continue
+            manifest_dict["archive_size"], manifest_dict["archive_hash"] = result
+            del result
 
             repo_data.append(manifest_dict)
 
@@ -3128,8 +3193,13 @@ class subcmd_server:
 
         filepath_repo_json = os.path.join(repo_dir, PKG_REPO_LIST_FILENAME)
 
-        with open(filepath_repo_json, "w", encoding="utf-8") as fh:
-            json.dump(repo_gen_dict, fh, indent=2)
+        try:
+            with open(filepath_repo_json, "w", encoding="utf-8") as fh:
+                json.dump(repo_gen_dict, fh, indent=2)
+        except Exception as ex:
+            msglog.fatal_error("failed to write repository: {:s}".format(str(ex)))
+            return False
 
         msglog.status("found {:d} packages.".format(len(repo_data)))
 
         return True
@@ -3180,10 +3250,19 @@ class subcmd_client:
                 msglog.fatal_error(msg)
                 return False
 
-        result_str = result.getvalue().decode("utf-8")
+        result_bytes = result.getvalue()
         del result
 
-        repo_gen_dict = pkg_repo_dat_from_json(json.loads(result_str))
+        try:
+            result_dict = json.loads(result_bytes)
+        except Exception as ex:
+            msglog.fatal_error("error loading JSON {:s}".format(str(ex)))
+            return False
+
+        if isinstance((repo_gen_dict := pkg_repo_data_from_json_or_error(result_dict)), str):
+            msglog.fatal_error("unexpected contants in JSON {:s}".format(repo_gen_dict))
+            return False
+        del result_dict
 
         items: List[Dict[str, Any]] = repo_gen_dict.data
         items.sort(key=lambda elem: elem.get("id", ""))
@@ -3289,10 +3368,12 @@ class subcmd_client:
                     manifest._asdict(),
                     filter_blender_version=blender_version_tuple,
                     filter_platform=platform_from_this_system(),
-                    skip_message_fn=lambda message:
-                        any_as_none(msglog.error("{:s}: {:s}".format(manifest.id, message))),
-                    error_fn=lambda ex:
-                        any_as_none(msglog.error("{:s}: {:s}".format(manifest.id, str(ex)))),
+                    skip_message_fn=lambda message: any_as_none(
+                        msglog.error("{:s}: {:s}".format(manifest.id, message))
+                    ),
+                    error_fn=lambda ex: any_as_none(
+                        msglog.error("{:s}: {:s}".format(manifest.id, str(ex)))
+                    ),
             ):
                 return False
 
@@ -3402,9 +3483,18 @@ class subcmd_client:
         assert isinstance(blender_version_tuple, tuple)
 
         # Extract...
-        pkg_repo_data = repo_pkginfo_from_local(local_dir=local_dir)
-        if pkg_repo_data is None:
-            # TODO: raise warning.
+        if isinstance((pkg_repo_data := repo_pkginfo_from_local_or_none(local_dir=local_dir)), str):
+            msglog.fatal_error("Error loading package repository: {:s}".format(pkg_repo_data))
+            return False
+
+        # Ensure a private directory so a local cache can be created.
+        if (local_cache_dir := repo_local_private_dir_ensure_with_subdir(
+                local_dir=local_dir,
+                subdir="cache",
+                error_fn=lambda ex: any_as_none(
+                    msglog.fatal_error("Error creating cache directory: {:s}".format(str(ex)))
+                ),
+        )) is None:
             return False
 
         # Most likely this doesn't have duplicates,but any errors procured by duplicates
@@ -3412,16 +3502,14 @@ class subcmd_client:
         packages_as_set = set(packages)
         packages = tuple(sorted(packages_as_set))
 
-        # Ensure a private directory so a local cache can be created.
-        local_cache_dir = repo_local_private_dir_ensure_with_subdir(local_dir=local_dir, subdir="cache")
-
         # Needed so relative paths can be properly calculated.
         remote_url_strip = remote_url_params_strip(remote_url)
 
         # TODO: filter by version and platform.
         json_data_pkg_info = [
             pkg_info for pkg_info in pkg_repo_data.data
-            if pkg_info["id"] in packages_as_set
+            # The `id` key should always exist, avoid raising an unhandled exception here if it's not.
+            if pkg_info.get("id", "") in packages_as_set
         ]
 
         # Narrow down:
@@ -3508,14 +3596,16 @@ class subcmd_client:
             # Check if the cache should be used.
             found = False
             if os.path.exists(filepath_local_cache_archive):
-                if (
-                        local_cache and (
-                            archive_size_expected,
-                            archive_hash_expected,
-                        ) == sha256_from_file(filepath_local_cache_archive, hash_prefix=True)
-                ):
-                    found = True
-                else:
+                if local_cache:
+                    if isinstance((result := sha256_from_file_or_error(
+                            filepath_local_cache_archive,
+                            hash_prefix=True,
+                    )), str):
+                        # Only a warning because it's not a problem to re-download the file.
+                        msglog.warn("unable to calculate hash for cache: {:s}".format(result))
+                    elif result == (archive_size_expected, archive_hash_expected):
+                        found = True
+                if not found:
                     os.unlink(filepath_local_cache_archive)
 
             if not found:
@@ -3610,6 +3700,17 @@ class subcmd_client:
             msglog.fatal_error("Missing local \"{:s}\"".format(local_dir))
             return False
 
+        # Ensure a private directory so a local cache can be created.
+        # TODO: don't create (it's only accessed for file removal).
+        if (local_cache_dir := repo_local_private_dir_ensure_with_subdir(
+                local_dir=local_dir,
+                subdir="cache",
+                error_fn=lambda ex: any_as_none(
+                    msglog.fatal_error("Error creating cache directory: {:s}".format(str(ex)))
+                ),
+        )) is None:
+            return False
+
         # Most likely this doesn't have duplicates,but any errors procured by duplicates
         # are likely to be obtuse enough that it's better to guarantee there are none.
         packages = tuple(sorted(set(packages)))
@@ -3639,10 +3740,6 @@ class subcmd_client:
         if has_fatal_error:
             return False
 
-        # Ensure a private directory so a local cache can be created.
-        # TODO: don't create (it's only accessed for file removal).
-        local_cache_dir = repo_local_private_dir_ensure_with_subdir(local_dir=local_dir, subdir="cache")
-
         files_to_clean: List[str] = []
         with CleanupPathsContext(files=files_to_clean, directories=()):
             for pkg_idname in packages_valid:
@@ -3907,17 +4004,21 @@ class subcmd_author:
 
             zip_data_override: Optional[bytes] = None
             if platform and (filepath_rel == PKG_MANIFEST_FILENAME_TOML):
-                with open(filepath_abs, "rb") as temp_fh:
-                    zip_data_override = temp_fh.read()
-                zip_data_override = zip_data_override + b"".join((
+                zip_data_override = b"".join((
                     b"\n",
                     b"\n",
                     b"# BEGIN GENERATED CONTENT.\n",
                     b"# This must not be included in source manifests.\n",
                     b"[build.generated]\n",
                     "platforms = [\"{:s}\"]\n".format(platform).encode("utf-8"),
                     b"# END GENERATED CONTENT.\n",
                 ))
+                try:
+                    with open(filepath_abs, "rb") as temp_fh:
+                        zip_data_override = temp_fh.read() + zip_data_override
+                except Exception as ex:
+                    msglog.status("Error overriding manifest \"{:s}\"".format(str(ex)))
+                    return False
 
             # Handy for testing that sub-directories:
             # zip_fh.write(filepath_abs, manifest.id + "/" + filepath_rel)