Extensions: support file URIs for the remote URL

Previously regular file-system paths were supported. However, supporting both
URLs and file paths isn't a common convention and it complicated internal logic.

Also fixed an error where the server-generate sub-command created the
"archive_url" from the file-name without URL encoding.
Campbell Barton 2024-05-16 12:04:37 +10:00
parent dd1769088b
commit 672d203e67
6 changed files with 118 additions and 90 deletions
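
For reference, a minimal sketch of the path/URI conversions this commit relies on, using only the standard `urllib` module. It mirrors the `path_to_url`/`path_from_url` helpers added to `blender_ext.py` below; the example values in the comments are illustrative, not taken from the repository.

import os
from urllib.parse import urljoin, urlparse, unquote
from urllib.request import pathname2url

def path_to_url(path: str) -> str:
    # "/tmp/my repo" -> "file:///tmp/my%20repo" (unsafe characters are URL encoded).
    return urljoin("file:", pathname2url(path))

def path_from_url(url: str) -> str:
    # "file:///tmp/my%20repo" -> "/tmp/my repo".
    p = urlparse(url)
    return os.path.join(p.netloc, unquote(p.path))

# The same encoding applies to the "archive_url" fix in the server-generate sub-command:
# a file-name must be URL encoded before being used as a relative URL, e.g.
# "./" + pathname2url("my package-1.0.0.zip") -> "./my%20package-1.0.0.zip"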

@@ -1656,8 +1656,16 @@ class BlPkgPkgInstallFiles(Operator, _BlPkgCmdMixIn):
def _invoke_for_drop(self, context, event):
self._drop_variables = True
# Drop logic.
url = self.url
print("DROP FILE:", url)
print("DROP FILE:", self.url)
# Blender calls the drop logic with an un-encoded file-path.
# It would be nicer if it used the file URI scheme,
# however this is only activated from Blender's drop logic.
#
# TODO: even though Blender supports "remote" repositories referencing `file://` prefixed URLs,
# these are not supported for dropping, since at the time of dropping it's not known whether the
# path is referenced from a "local" repository or a "remote" that uses a `file://` URL.
filepath = self.url
from .bl_extension_ops import repo_iter_valid_local_only
from .bl_extension_utils import pkg_manifest_dict_from_file_or_error
@@ -1666,7 +1674,7 @@ class BlPkgPkgInstallFiles(Operator, _BlPkgCmdMixIn):
self.report({'ERROR'}, "No Local Repositories")
return {'CANCELLED'}
if isinstance(result := pkg_manifest_dict_from_file_or_error(url), str):
if isinstance(result := pkg_manifest_dict_from_file_or_error(filepath), str):
self.report({'ERROR'}, "Error in manifest {:s}".format(result))
return {'CANCELLED'}
@@ -1678,7 +1686,7 @@ class BlPkgPkgInstallFiles(Operator, _BlPkgCmdMixIn):
# Set to itself so the property is considered "set".
self.repo = self.repo
self.filepath = url
self.filepath = filepath
wm = context.window_manager
wm.invoke_props_dialog(self)

@@ -486,20 +486,15 @@ def pkg_manifest_dict_from_file_or_error(
def pkg_manifest_archive_url_abs_from_remote_url(remote_url: str, archive_url: str) -> str:
from .cli.blender_ext import remote_url_has_filename_suffix
if archive_url.startswith("./"):
if (
len(remote_url) > len(PKG_REPO_LIST_FILENAME) and
remote_url.endswith(PKG_REPO_LIST_FILENAME) and
(remote_url[-(len(PKG_REPO_LIST_FILENAME) + 1)] in {"\\", "/"})
):
if remote_url_has_filename_suffix(remote_url):
# The URL contains the JSON name, strip this off before adding the package name.
archive_url = remote_url[:-len(PKG_REPO_LIST_FILENAME)] + archive_url[2:]
elif remote_url.startswith(("http://", "https://", "file://")):
else:
assert remote_url.startswith(("http://", "https://", "file://"))
# Simply add to the URL.
archive_url = remote_url.rstrip("/") + archive_url[1:]
else:
# Handle as a regular path.
archive_url = os.path.join(remote_url, archive_url[2:])
return archive_url

@@ -47,10 +47,6 @@ REQUEST_EXIT = False
# When set, ignore broken pipe exceptions (these occur when the calling process is closed).
FORCE_EXIT_OK = False
# Expect the remote URL to contain JSON (don't append the JSON name to the path).
# File-system still append the expected JSON filename.
REMOTE_REPO_HAS_JSON_IMPLIED = True
def signal_handler_sigint(_sig: int, _frame: Any) -> None:
# pylint: disable-next=global-statement
@@ -80,6 +76,8 @@ PKG_MANIFEST_FILENAME_TOML = "blender_manifest.toml"
# This directory is in the local repository.
REPO_LOCAL_PRIVATE_DIR = ".blender_ext"
URL_KNOWN_PREFIX = ("http://", "https://", "file://")
MESSAGE_TYPES = {'STATUS', 'PROGRESS', 'WARN', 'ERROR', 'PATH', 'DONE'}
RE_MANIFEST_SEMVER = re.compile(
@@ -258,6 +256,19 @@ class PkgManifest_Archive(NamedTuple):
# -----------------------------------------------------------------------------
# Generic Functions
def path_to_url(path: str) -> str:
from urllib.parse import urljoin
from urllib.request import pathname2url
return urljoin("file:", pathname2url(path))
def path_from_url(path: str) -> str:
from urllib.parse import urlparse, unquote
p = urlparse(path)
return os.path.join(p.netloc, unquote(p.path))
def random_acii_lines(*, seed: Union[int, str], width: int) -> Generator[str, None, None]:
"""
Generate random ASCII text [A-Za-z0-9].
@@ -565,10 +576,20 @@ def pkg_manifest_from_archive_and_validate(
return pkg_manifest_from_zipfile_and_validate(zip_fh, archive_subdir, strict=strict)
def remote_url_has_filename_suffix(url: str) -> bool:
# When the URL ends with `.json` it's assumed to reference a file inside a directory.
# In these cases the filename is stripped before constructing relative paths.
return url.endswith("/" + PKG_REPO_LIST_FILENAME)
def remote_url_get(url: str) -> str:
if REMOTE_REPO_HAS_JSON_IMPLIED:
return url
return urllib.parse.urljoin(url, PKG_REPO_LIST_FILENAME)
if url_is_filesystem(url):
if remote_url_has_filename_suffix(url):
return url
# Add the default name to `file://` URLs if this isn't already a reference to a JSON file.
return "{:s}/{:s}".format(url.rstrip("/"), PKG_REPO_LIST_FILENAME)
return url
# -----------------------------------------------------------------------------
@@ -709,14 +730,13 @@ def filepath_retrieve_to_filepath_iter(
def url_retrieve_to_data_iter_or_filesystem(
path: str,
is_filesystem: bool,
url: str,
headers: Dict[str, str],
chunk_size: int,
timeout_in_seconds: float,
) -> Generator[bytes, None, None]:
if is_filesystem:
with open(path, "rb") as fh_source:
if url_is_filesystem(url):
with open(path_from_url(url), "rb") as fh_source:
while (block := fh_source.read(chunk_size)):
yield block
else:
@@ -725,7 +745,7 @@ def url_retrieve_to_data_iter_or_filesystem(
_size,
_response_headers,
) in url_retrieve_to_data_iter(
path,
url,
headers=headers,
chunk_size=chunk_size,
timeout_in_seconds=timeout_in_seconds,
@@ -734,23 +754,22 @@ def url_retrieve_to_data_iter_or_filesystem(
def url_retrieve_to_filepath_iter_or_filesystem(
path: str,
url: str,
filepath: str,
is_filesystem: bool,
headers: Dict[str, str],
chunk_size: int,
timeout_in_seconds: float,
) -> Generator[Tuple[int, int], None, None]:
if is_filesystem:
if url_is_filesystem(url):
yield from filepath_retrieve_to_filepath_iter(
path,
path_from_url(url),
filepath,
chunk_size=chunk_size,
timeout_in_seconds=timeout_in_seconds,
)
else:
for (read, size, _response_headers) in url_retrieve_to_filepath_iter(
path,
url,
filepath,
headers=headers,
chunk_size=chunk_size,
@@ -1240,11 +1259,7 @@ def repo_sync_from_remote(
if request_exit:
return False
is_repo_filesystem = repo_is_filesystem(remote_url=remote_url)
if is_repo_filesystem:
remote_json_path = os.path.join(remote_url, PKG_REPO_LIST_FILENAME)
else:
remote_json_path = remote_url_get(remote_url)
remote_json_url = remote_url_get(remote_url)
local_private_dir = repo_local_private_dir_ensure(local_dir=local_dir)
local_json_path = os.path.join(local_private_dir, PKG_REPO_LIST_FILENAME)
@@ -1263,14 +1278,11 @@ def repo_sync_from_remote(
if request_exit:
return False
# No progress for file copying, assume local file system is fast enough.
# `shutil.copyfile(remote_json_path, local_json_path_temp)`.
try:
read_total = 0
for (read, size) in url_retrieve_to_filepath_iter_or_filesystem(
remote_json_path,
remote_json_url,
local_json_path_temp,
is_filesystem=is_repo_filesystem,
headers=url_request_headers_create(accept_json=True, user_agent=online_user_agent),
chunk_size=CHUNK_SIZE_DEFAULT,
timeout_in_seconds=timeout_in_seconds,
@@ -1351,10 +1363,19 @@ def repo_pkginfo_from_local_with_idname_as_key(*, local_dir: str) -> Optional[Pk
return pkg_repo_dat_from_json(result)
def repo_is_filesystem(*, remote_url: str) -> bool:
if remote_url.startswith(("https://", "http://")):
def url_has_known_prefix(path: str) -> bool:
return path.startswith(URL_KNOWN_PREFIX)
def url_is_filesystem(url: str) -> bool:
if url.startswith(("file://")):
return True
if url.startswith(URL_KNOWN_PREFIX):
return False
return True
# Argument parsing must ensure this never happens.
raise ValueError("prefix not known")
return False
# -----------------------------------------------------------------------------
@@ -1375,6 +1396,16 @@ def arg_handle_str_as_package_names(value: str) -> Sequence[str]:
return result
def arg_handle_str_as_url(value: str) -> Sequence[str]:
# Handled here so unexpected URLs don't cause difficult-to-understand errors in inner logic.
# The URLs themselves may still be invalid, this just fails early in the case of obvious oversights.
if not url_has_known_prefix(value):
raise argparse.ArgumentTypeError(
"Invalid URL \"{:s}\", expected a prefix in {!r}".format(value, URL_KNOWN_PREFIX)
)
return value
# -----------------------------------------------------------------------------
# Generate Repository
@@ -1416,7 +1447,7 @@ def generic_arg_remote_url(subparse: argparse.ArgumentParser) -> None:
subparse.add_argument(
"--remote-url",
dest="remote_url",
type=str,
type=arg_handle_str_as_url,
help=(
"The remote repository URL."
),
@@ -1588,9 +1619,8 @@ class subcmd_server:
repo_dir: str,
) -> bool:
is_repo_filesystem = repo_is_filesystem(remote_url=repo_dir)
if not is_repo_filesystem:
message_error(msg_fn, "Directory: {!r} must be local!".format(repo_dir))
if url_has_known_prefix(repo_dir):
message_error(msg_fn, "Directory: {!r} must be a local path, not a URL!".format(repo_dir))
return False
if not os.path.isdir(repo_dir):
@@ -1647,7 +1677,7 @@ class subcmd_server:
continue
# A relative URL.
manifest_dict["archive_url"] = "./" + filename
manifest_dict["archive_url"] = "./" + urllib.request.pathname2url(filename)
# Add archive variables, see: `PkgManifest_Archive`.
(
@@ -1678,26 +1708,13 @@ class subcmd_client:
online_user_agent: str,
timeout_in_seconds: float,
) -> bool:
is_repo_filesystem = repo_is_filesystem(remote_url=remote_url)
if is_repo_filesystem:
if not os.path.isdir(remote_url):
message_error(msg_fn, "Directory: {!r} not found!".format(remote_url))
return False
if is_repo_filesystem:
filepath_repo_json = os.path.join(remote_url, PKG_REPO_LIST_FILENAME)
if not os.path.exists(filepath_repo_json):
message_error(msg_fn, "File: {!r} not found!".format(filepath_repo_json))
return False
else:
filepath_repo_json = remote_url_get(remote_url)
remote_json_url = remote_url_get(remote_url)
# TODO: validate JSON content.
try:
result = io.BytesIO()
for block in url_retrieve_to_data_iter_or_filesystem(
filepath_repo_json,
is_filesystem=is_repo_filesystem,
remote_json_url,
headers=url_request_headers_create(accept_json=True, user_agent=online_user_agent),
chunk_size=CHUNK_SIZE_DEFAULT,
timeout_in_seconds=timeout_in_seconds,
@@ -1905,7 +1922,6 @@ class subcmd_client:
timeout_in_seconds: float,
) -> bool:
# Extract...
is_repo_filesystem = repo_is_filesystem(remote_url=remote_url)
pkg_repo_data = repo_pkginfo_from_local_with_idname_as_key(local_dir=local_dir)
if pkg_repo_data is None:
# TODO: raise warning.
@@ -1966,23 +1982,12 @@ class subcmd_client:
# Remote path.
if pkg_archive_url.startswith("./"):
if is_repo_filesystem:
filepath_remote_archive = os.path.join(remote_url, pkg_archive_url[2:])
if remote_url_has_filename_suffix(remote_url):
filepath_remote_archive = remote_url.rpartition("/")[0] + pkg_archive_url[1:]
else:
if REMOTE_REPO_HAS_JSON_IMPLIED:
# TODO: use `urllib.parse.urlsplit(..)`.
# NOTE: strip the path until the directory.
# Convert: `https://foo.bar/bl_ext_repo.json` -> https://foo.bar/ARCHIVE_NAME
filepath_remote_archive = urllib.parse.urljoin(
remote_url.rpartition("/")[0],
pkg_archive_url[2:],
)
else:
filepath_remote_archive = urllib.parse.urljoin(remote_url, pkg_archive_url[2:])
is_pkg_filesystem = is_repo_filesystem
filepath_remote_archive = remote_url.rstrip("/") + pkg_archive_url[1:]
else:
filepath_remote_archive = pkg_archive_url
is_pkg_filesystem = repo_is_filesystem(remote_url=pkg_archive_url)
# Check if the cache should be used.
found = False
@@ -2008,7 +2013,6 @@ class subcmd_client:
with open(filepath_local_cache_archive, "wb") as fh_cache:
for block in url_retrieve_to_data_iter_or_filesystem(
filepath_remote_archive,
is_filesystem=is_pkg_filesystem,
headers=url_request_headers_create(accept_json=False, user_agent=online_user_agent),
chunk_size=CHUNK_SIZE_DEFAULT,
timeout_in_seconds=timeout_in_seconds,
@@ -2382,7 +2386,7 @@ class subcmd_dummy:
)
return False
if not repo_is_filesystem(remote_url=repo_dir):
if url_has_known_prefix(repo_dir):
message_error(msg_fn, "Generating a repository on a remote path is not supported")
return False

@@ -80,6 +80,11 @@ STATUS_NON_ERROR = {'STATUS', 'PROGRESS'}
# Generic Utilities
#
def path_to_url(path: str) -> str:
from urllib.parse import urljoin
from urllib.request import pathname2url
return urljoin('file:', pathname2url(path))
def rmdir_contents(directory: str) -> None:
"""
@@ -292,7 +297,8 @@ class TestCLI_WithRepo(unittest.TestCase):
else:
cls.dirpath_url = "http://localhost:{:d}".format(HTTP_PORT)
else:
cls.dirpath_url = cls.dirpath
# Even local paths must use URL syntax: `file://`.
cls.dirpath_url = path_to_url(cls.dirpath)
@classmethod
def tearDownClass(cls) -> None:

@@ -48,11 +48,6 @@ sys.path.append(os.path.join(BASE_DIR, "modules"))
import python_wheel_generate # noqa: E402
CMD = (
sys.executable,
os.path.normpath(os.path.join(BASE_DIR, "..", "cli", "blender_ext.py")),
)
# Write the command to a script, used so it's possible to manually run commands outside of the test environment.
TEMP_COMMAND_OUTPUT = "" # os.path.join(tempfile.gettempdir(), "blender_test.sh")
@@ -63,6 +58,12 @@ USE_PAUSE_BEFORE_EXIT = False
# -----------------------------------------------------------------------------
# Utility Functions
def path_to_url(path: str) -> str:
from urllib.parse import urljoin
from urllib.request import pathname2url
return urljoin('file:', pathname2url(path))
def pause_until_keyboard_interrupt() -> None:
print("Waiting for keyboard interrupt...")
try:
@@ -258,6 +259,7 @@ def run_blender_extensions_no_errors(
# Initialized from `main()`.
TEMP_DIR_BLENDER_USER = ""
TEMP_DIR_REMOTE = ""
TEMP_DIR_REMOTE_AS_URL = ""
TEMP_DIR_LOCAL = ""
# Don't leave temporary files in TMP: `/tmp` (since it's only cleared on restart).
# Instead, have a test-local temporary directory which is removed when the test finishes.
@@ -327,7 +329,7 @@ class TestSimple(TestWithTempBlenderUser_MixIn, unittest.TestCase):
"repo-add",
"--name", "MyTestRepo",
"--directory", TEMP_DIR_LOCAL,
"--url", TEMP_DIR_REMOTE,
"--url", TEMP_DIR_REMOTE_AS_URL,
# A bit odd, this argument avoids running so many commands to set up a test.
"--clear-all",
repo_id,
@@ -354,7 +356,10 @@ class TestSimple(TestWithTempBlenderUser_MixIn, unittest.TestCase):
stdout = run_blender_extensions_no_errors((
"sync",
))
self.assertEqual(stdout.rstrip("\n").split("\n")[-1], "STATUS Sync complete: {:s}".format(TEMP_DIR_REMOTE))
self.assertEqual(
stdout.rstrip("\n").split("\n")[-1],
"STATUS Sync complete: {:s}".format(TEMP_DIR_REMOTE_AS_URL),
)
# Install the package into Blender.
@@ -366,7 +371,7 @@ class TestSimple(TestWithTempBlenderUser_MixIn, unittest.TestCase):
''' name: "MyTestRepo"\n'''
''' directory: "{:s}"\n'''
''' url: "{:s}"\n'''
).format(TEMP_DIR_LOCAL, TEMP_DIR_REMOTE))
).format(TEMP_DIR_LOCAL, TEMP_DIR_REMOTE_AS_URL))
stdout = run_blender_extensions_no_errors(("list",))
self.assertEqual(
@@ -435,7 +440,10 @@ class TestSimple(TestWithTempBlenderUser_MixIn, unittest.TestCase):
stdout = run_blender_extensions_no_errors((
"sync",
))
self.assertEqual(stdout.rstrip("\n").split("\n")[-1], "STATUS Sync complete: {:s}".format(TEMP_DIR_REMOTE))
self.assertEqual(
stdout.rstrip("\n").split("\n")[-1],
"STATUS Sync complete: {:s}".format(TEMP_DIR_REMOTE_AS_URL),
)
# Install.
@@ -485,6 +493,7 @@ def main() -> None:
global TEMP_DIR_REMOTE
global TEMP_DIR_LOCAL
global TEMP_DIR_TMPDIR
global TEMP_DIR_REMOTE_AS_URL
with tempfile.TemporaryDirectory() as temp_prefix:
TEMP_DIR_BLENDER_USER = os.path.join(temp_prefix, "bl_ext_blender")
@@ -503,6 +512,8 @@ def main() -> None:
for dirname in user_dirs:
os.makedirs(os.path.join(TEMP_DIR_BLENDER_USER, dirname), exist_ok=True)
TEMP_DIR_REMOTE_AS_URL = path_to_url(TEMP_DIR_REMOTE)
unittest.main()

@@ -6685,7 +6685,11 @@ static void rna_def_userdef_filepaths_extension_repo(BlenderRNA *brna)
prop = RNA_def_property(srna, "remote_url", PROP_STRING, PROP_NONE);
RNA_def_property_string_sdna(prop, nullptr, "remote_url");
RNA_def_property_ui_text(prop, "URL", "Remote URL or path for extension repository");
RNA_def_property_ui_text(
prop,
"URL",
"Remote URL to the extension repository, "
"the file-system may be referenced using the file URI scheme: \"file://\"");
RNA_def_property_translation_context(prop, BLT_I18NCONTEXT_EDITOR_FILEBROWSER);
RNA_def_property_update(prop, 0, "rna_userdef_update");