Merge branch 'blender-v4.2-release'

Campbell Barton 2024-07-01 15:16:48 +10:00
commit 14a2e933f4
8 changed files with 666 additions and 44 deletions

@@ -90,6 +90,50 @@ def cookie_from_session():
# -----------------------------------------------------------------------------
# Shared Low Level Utilities
# NOTE(@ideasman42): this is used externally from `addon_utils`, which is something we try to avoid,
# but it is done here for generating the compatibility cache. Avoiding this "bad-level call" would be good
# but is impractical as long as the command line tool is treated as a stand-alone program (which I prefer to keep).
def manifest_compatible_with_wheel_data_or_error(
pkg_manifest_filepath, # `str`
repo_module, # `str`
pkg_id, # `str`
repo_directory, # `str`
wheel_list, # `List[Tuple[str, List[str]]]`
): # `Optional[str]`
from bl_pkg.bl_extension_utils import (
pkg_manifest_dict_is_valid_or_error,
toml_from_filepath,
)
from bl_pkg.bl_extension_ops import (
pkg_manifest_params_compatible_or_error_for_this_system,
)
try:
manifest_dict = toml_from_filepath(pkg_manifest_filepath)
except Exception as ex:
return "Error reading TOML data {:s}".format(str(ex))
if (error := pkg_manifest_dict_is_valid_or_error(manifest_dict, from_repo=False, strict=False)):
return error
if isinstance(error := pkg_manifest_params_compatible_or_error_for_this_system(
blender_version_min=manifest_dict.get("blender_version_min", ""),
blender_version_max=manifest_dict.get("blender_version_max", ""),
platforms=manifest_dict.get("platforms", ""),
), str):
return error
# NOTE: the caller may need to collect wheels when refreshing.
# While this isn't so clean, it happens to be efficient.
# It could be refactored to work differently in the future if that is ever needed.
if wheels_rel := manifest_dict.get("wheels"):
from .bl_extension_ops import pkg_wheel_filter
if (wheel_abs := pkg_wheel_filter(repo_module, pkg_id, repo_directory, wheels_rel)) is not None:
wheel_list.append(wheel_abs)
return None
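# A minimal usage sketch (hypothetical paths & identifiers): validate one installed extension
# and collect its platform-compatible wheels into the same `(unique_pkg_id, [abs_wheel_paths])`
# list shape the caller passes in.
def _example_collect_wheels():  # hypothetical helper, for illustration only
    wheel_list = []
    error = manifest_compatible_with_wheel_data_or_error(
        "/extensions/user_default/my_pkg/blender_manifest.toml",  # hypothetical manifest path
        "user_default",  # `repo_module` (hypothetical)
        "my_pkg",  # `pkg_id` (hypothetical)
        "/extensions/user_default",  # `repo_directory` (hypothetical)
        wheel_list,
    )
    # On success `error` is None and `wheel_list` holds at most one entry for this package.
    return error, wheel_list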
def repo_paths_or_none(repo_item):
if (directory := repo_item.directory) == "":
return None, None

@@ -9,6 +9,7 @@ Where the operator shows progress, any errors and supports canceling operations.
__all__ = (
"extension_repos_read",
"pkg_wheel_filter",
)
import os
@@ -60,6 +61,9 @@ rna_prop_enable_on_install_type_map = {
"theme": "Set Current Theme",
}
_ext_base_pkg_idname = "bl_ext"
_ext_base_pkg_idname_with_dot = _ext_base_pkg_idname + "."
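# Together these form extension add-on module names of the form
# "bl_ext.{repo_module}.{pkg_id}", e.g. "bl_ext.user_default.my_pkg" (hypothetical example).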
def url_append_defaults(url):
from .bl_extension_utils import url_append_query_for_blender
@@ -488,7 +492,7 @@ def _preferences_ensure_disabled(*, repo_item, pkg_id_sequence, default_set):
modules_clear = []
module_base_elem = ("bl_ext", repo_item.module)
module_base_elem = (_ext_base_pkg_idname, repo_item.module)
repo_module = sys.modules.get(".".join(module_base_elem))
if repo_module is None:
@@ -600,7 +604,7 @@ def _preferences_install_post_enable_on_install(
if pkg_id in pkg_id_sequence_upgrade:
continue
addon_module_name = "bl_ext.{:s}.{:s}".format(repo_item.module, pkg_id)
addon_module_name = "{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, repo_item.module, pkg_id)
addon_utils.enable(addon_module_name, default_set=True, handle_error=handle_error)
elif item_local.type == "theme":
if has_theme:
@@ -819,7 +823,61 @@ def _extensions_wheel_filter_for_platform(wheels):
return wheels_compatible
def _extensions_repo_sync_wheels(repo_cache_store):
def pkg_wheel_filter(
repo_module, # `str`
pkg_id, # `str`
repo_directory, # `str`
wheels_rel, # `List[str]`
): # `-> Optional[Tuple[str, List[str]]]`
# Filter only the wheels for this platform.
wheels_rel = _extensions_wheel_filter_for_platform(wheels_rel)
if not wheels_rel:
return None
pkg_dirpath = os.path.join(repo_directory, pkg_id)
wheels_abs = []
for filepath_rel in wheels_rel:
filepath_abs = os.path.join(pkg_dirpath, filepath_rel)
if not os.path.exists(filepath_abs):
continue
wheels_abs.append(filepath_abs)
if not wheels_abs:
return None
unique_pkg_id = "{:s}.{:s}".format(repo_module, pkg_id)
return (unique_pkg_id, wheels_abs)
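# For example (hypothetical values), a package with one compatible wheel that exists on disk returns:
#   ("user_default.my_pkg", ["/extensions/user_default/my_pkg/wheels/my_dep-1.0-py3-none-any.whl"])
# and None when no wheels apply to this platform or none exist on disk.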
def _extension_repos_directory_to_module_map():
return {repo.directory: repo.module for repo in bpy.context.preferences.extensions.repos if repo.enabled}
def _extensions_enabled():
from addon_utils import check_extension
extensions_enabled = set()
extensions_prefix_len = len(_ext_base_pkg_idname_with_dot)
for addon in bpy.context.preferences.addons:
module_name = addon.module
if check_extension(module_name):
extensions_enabled.add(module_name[extensions_prefix_len:].partition(".")[0::2])
return extensions_enabled
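# A worked example (hypothetical module name) of the slicing idiom above:
# "bl_ext.user_default.my_pkg"[len("bl_ext."):] -> "user_default.my_pkg"
# "user_default.my_pkg".partition(".") -> ("user_default", ".", "my_pkg")
# taking [0::2] keeps the first and last elements -> ("user_default", "my_pkg"),
# the `(repo_module, pkg_id)` pair used as a key throughout this module.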
def _extensions_enabled_from_repo_directory_and_pkg_id_sequence(repo_directory_and_pkg_id_sequence):
# Used to calculate the extensions which will be enabled,
# needed so the wheels for an extension can be set up before the add-on that uses them is enabled.
extensions_enabled_pending = set()
repo_directory_to_module_map = _extension_repos_directory_to_module_map()
for repo_directory, pkg_id_sequence in repo_directory_and_pkg_id_sequence:
repo_module = repo_directory_to_module_map[repo_directory]
for pkg_id in pkg_id_sequence:
extensions_enabled_pending.add((repo_module, pkg_id))
return extensions_enabled_pending
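# For example (hypothetical values): given `[("/extensions/user_default", ["my_pkg"])]` and an
# enabled repository whose directory is "/extensions/user_default" with module "user_default",
# this returns `{("user_default", "my_pkg")}` - matching the keys from `_extensions_enabled()`.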
def _extensions_repo_sync_wheels(repo_cache_store, extensions_enabled):
"""
This function collects all wheels from all packages and ensures the packages are either extracted or removed
when they are no longer used.
@@ -838,28 +896,18 @@ def _extensions_repo_sync_wheels(repo_cache_store):
repo_module = repo.module
repo_directory = repo.directory
for pkg_id, item_local in pkg_manifest_local.items():
pkg_dirpath = os.path.join(repo_directory, pkg_id)
# Check it's enabled before initializing its wheels.
# NOTE: no need for compatibility checks here as only compatible items will be included.
if (repo_module, pkg_id) not in extensions_enabled:
continue
wheels_rel = item_local.wheels
if not wheels_rel:
continue
# Filter only the wheels for this platform.
wheels_rel = _extensions_wheel_filter_for_platform(wheels_rel)
if not wheels_rel:
continue
wheels_abs = []
for filepath_rel in wheels_rel:
filepath_abs = os.path.join(pkg_dirpath, filepath_rel)
if not os.path.exists(filepath_abs):
continue
wheels_abs.append(filepath_abs)
if not wheels_abs:
continue
unique_pkg_id = "{:s}.{:s}".format(repo_module, pkg_id)
wheel_list.append((unique_pkg_id, wheels_abs))
if (wheel_abs := pkg_wheel_filter(repo_module, pkg_id, repo_directory, wheels_rel)) is not None:
wheel_list.append(wheel_abs)
extensions = bpy.utils.user_resource('EXTENSIONS')
local_dir = os.path.join(extensions, ".local")
@@ -871,6 +919,26 @@ def _extensions_repo_sync_wheels(repo_cache_store):
)
def _extensions_repo_refresh_on_change(repo_cache_store, *, extensions_enabled, compat_calc, stats_calc):
import addon_utils
if extensions_enabled is not None:
_extensions_repo_sync_wheels(repo_cache_store, extensions_enabled)
# Wheel sync handled above.
if compat_calc:
# NOTE: `extensions_enabled` may contain add-ons which are not yet enabled (these are pending).
# These will *not* have their compatibility information refreshed here.
# This is acceptable because:
# - Installing & enabling an extension relies on the extension being compatible,
# so it can be assumed to already be compatible.
# - If the add-on existed and was incompatible it *will* have its compatibility recalculated.
# - Any missing cache entries will cause the cache to be re-generated on the next start or from an explicit refresh.
addon_utils.extensions_refresh(ensure_wheels=False)
if stats_calc:
repo_stats_calc()
# -----------------------------------------------------------------------------
# Theme Handling
#
@@ -1417,6 +1485,8 @@ class EXTENSIONS_OT_repo_refresh_all(Operator):
# In-line `bpy.ops.preferences.addon_refresh`.
addon_utils.modules_refresh()
# Ensure compatibility info and wheels are up to date.
addon_utils.extensions_refresh(ensure_wheels=True)
_preferences_ui_redraw()
_preferences_ui_refresh_addons()
@@ -1755,8 +1825,21 @@ class EXTENSIONS_OT_package_install_marked(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
_extensions_repo_sync_wheels(repo_cache_store)
repo_stats_calc()
extensions_enabled = None
if self.enable_on_install:
extensions_enabled = _extensions_enabled()
extensions_enabled.update(
_extensions_enabled_from_repo_directory_and_pkg_id_sequence(
self._repo_map_packages_addon_only,
)
)
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled,
compat_calc=True,
stats_calc=True,
)
# TODO: it would be nice to include this message in the banner.
def handle_error(ex):
@@ -1779,6 +1862,17 @@ class EXTENSIONS_OT_package_install_marked(Operator, _ExtCmdMixIn):
handle_error=handle_error,
)
if self.enable_on_install:
if (extensions_enabled_test := _extensions_enabled()) != extensions_enabled:
# Some extensions could not be enabled; re-calculate wheels which may have been set up
# in anticipation of the add-on working.
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled_test,
compat_calc=False,
stats_calc=False,
)
_preferences_ui_redraw()
_preferences_ui_refresh_addons()
@@ -1878,8 +1972,12 @@ class EXTENSIONS_OT_package_uninstall_marked(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
_extensions_repo_sync_wheels(repo_cache_store)
repo_stats_calc()
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=_extensions_enabled(),
compat_calc=True,
stats_calc=True,
)
_preferences_theme_state_restore(self._theme_restore)
@@ -2082,8 +2180,22 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
_extensions_repo_sync_wheels(repo_cache_store)
repo_stats_calc()
extensions_enabled = None
if self.enable_on_install:
extensions_enabled = _extensions_enabled()
# We may want to support multiple.
extensions_enabled.update(
_extensions_enabled_from_repo_directory_and_pkg_id_sequence(
[(self.repo_directory, self.pkg_id_sequence)]
)
)
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled,
compat_calc=True,
stats_calc=True,
)
# TODO: it would be nice to include this message in the banner.
@@ -2110,6 +2222,17 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
handle_error=handle_error,
)
if self.enable_on_install:
if (extensions_enabled_test := _extensions_enabled()) != extensions_enabled:
# Some extensions could not be enabled; re-calculate wheels which may have been set up
# in anticipation of the add-on working.
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled_test,
compat_calc=False,
stats_calc=False,
)
_preferences_ui_redraw()
_preferences_ui_refresh_addons()
@@ -2385,8 +2508,21 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
_extensions_repo_sync_wheels(repo_cache_store)
repo_stats_calc()
extensions_enabled = None
if self.enable_on_install:
extensions_enabled = _extensions_enabled()
extensions_enabled.update(
_extensions_enabled_from_repo_directory_and_pkg_id_sequence(
[(self.repo_directory, (self.pkg_id,))]
)
)
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled,
compat_calc=True,
stats_calc=True,
)
# TODO: it would be nice to include this message in the banner.
def handle_error(ex):
@@ -2412,6 +2548,17 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
handle_error=handle_error,
)
if self.enable_on_install:
if (extensions_enabled_test := _extensions_enabled()) != extensions_enabled:
# Some extensions could not be enabled; re-calculate wheels which may have been set up
# in anticipation of the add-on working.
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=extensions_enabled_test,
compat_calc=False,
stats_calc=False,
)
_preferences_ui_redraw()
_preferences_ui_refresh_addons()
@@ -2784,8 +2931,12 @@ class EXTENSIONS_OT_package_uninstall(Operator, _ExtCmdMixIn):
error_fn=self.error_fn_from_exception,
)
_extensions_repo_sync_wheels(repo_cache_store)
repo_stats_calc()
_extensions_repo_refresh_on_change(
repo_cache_store,
extensions_enabled=None,
compat_calc=True,
stats_calc=True,
)
_preferences_theme_state_restore(self._theme_restore)
@@ -2995,7 +3146,9 @@ class EXTENSIONS_OT_package_show_settings(Operator):
def execute(self, _context):
repo_item = extension_repos_read_index(self.repo_index)
bpy.ops.preferences.addon_show(module="bl_ext.{:s}.{:s}".format(repo_item.module, self.pkg_id))
bpy.ops.preferences.addon_show(
module="{:s}.{:s}.{:s}".format(_ext_base_pkg_idname, repo_item.module, self.pkg_id),
)
return {'FINISHED'}

@@ -77,7 +77,7 @@ class UI_OT_i18n_updatetranslation_work_repo(Operator):
self.settings.to_json(),
)
# Not working (UI is not refreshed...).
#self.report({'INFO'}, "Extracting messages, this will take some time...")
# self.report({'INFO'}, "Extracting messages, this will take some time...")
context.window_manager.progress_update(1)
ret = subprocess.run(cmmd, env=env)
if ret.returncode != 0:

@@ -0,0 +1,3 @@
# SPDX-FileCopyrightText: 2024 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later

@@ -0,0 +1,42 @@
# SPDX-FileCopyrightText: 2017-2023 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later
"""
Implementation of Blender's command line ``--addons`` argument,
e.g. ``--addons a,b,c`` to enable add-ons.
"""
__all__ = (
"set_from_cli",
)
def set_from_cli(addons_as_string):
from addon_utils import (
check,
check_extension,
enable,
extensions_refresh,
)
addon_modules = addons_as_string.split(",")
addon_modules_extensions = [m for m in addon_modules if check_extension(m)]
addon_modules_extensions_has_failure = False
if addon_modules_extensions:
extensions_refresh(
ensure_wheels=True,
addon_modules_pending=addon_modules_extensions,
)
for m in addon_modules:
if check(m)[1] is False:
if enable(m, persistent=True) is None:
if check_extension(m):
addon_modules_extensions_has_failure = True
# Re-calculate wheels if any extensions failed to be enabled.
if addon_modules_extensions_has_failure:
extensions_refresh(
ensure_wheels=True,
)
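# For example (hypothetical module names), running:
#
#   blender --addons my_addon,bl_ext.blender_org.my_extension
#
# results in a call equivalent to `set_from_cli("my_addon,bl_ext.blender_org.my_extension")`:
# wheels for the listed extensions are refreshed up-front, each add-on is enabled,
# and wheels are re-calculated if any extension fails to enable.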

@@ -12,6 +12,7 @@ __all__ = (
"disable_all",
"reset_all",
"module_bl_info",
"extensions_refresh",
)
import bpy as _bpy
@@ -22,6 +23,10 @@ error_encoding = False
error_duplicates = []
addons_fake_modules = {}
# Global cached extensions, set before loading extensions on startup.
# `{addon_module_name: "Reason for incompatibility", ...}`
_extensions_incompatible = {}
# called only once at startup, avoids calling 'reset_all', correct but slower.
def _initialize_once():
@@ -309,8 +314,6 @@ def enable(module_name, *, default_set=False, persistent=False, handle_error=Non
import importlib
from bpy_restrict_state import RestrictBlend
is_extension = module_name.startswith(_ext_base_pkg_idname_with_dot)
if handle_error is None:
def handle_error(ex):
if isinstance(ex, ImportError):
@@ -323,6 +326,18 @@ def enable(module_name, *, default_set=False, persistent=False, handle_error=Non
import traceback
traceback.print_exc()
if (is_extension := module_name.startswith(_ext_base_pkg_idname_with_dot)):
# Ensure the extensions are compatible.
if _extensions_incompatible:
if (error := _extensions_incompatible.get(
module_name[len(_ext_base_pkg_idname_with_dot):].partition(".")[0::2],
)):
try:
raise RuntimeError("Extension {:s} is incompatible ({:s})".format(module_name, error))
except RuntimeError as ex:
handle_error(ex)
return None
# reload if the mtime changes
mod = sys.modules.get(module_name)
# chances of the file _not_ existing are low, but it could be removed
@@ -540,6 +555,10 @@ def reset_all(*, reload_scripts=False):
modules._is_first = True
addons_fake_modules.clear()
# Update extensions compatibility (after reloading preferences).
# Potentially refreshing wheels too.
_initialize_extensions_compat_data(_bpy.utils.user_resource('EXTENSIONS'), True, None)
for path, pkg_id in _paths_with_extension_repos():
if not pkg_id:
_bpy.utils._sys_path_ensure_append(path)
@@ -654,6 +673,336 @@ def module_bl_info(mod, *, info_basis=None):
return addon_info
# -----------------------------------------------------------------------------
# Extension Pre-Flight Compatibility Check
#
# Check extension compatibility on startup so any extensions which are incompatible with Blender are marked as
# incompatible and won't be loaded. This cache avoids having to scan all extensions on *every* startup.
#
# Implementation:
#
# The emphasis for this cache is to have minimal overhead for the common cases:
# - The simple case where no extensions are enabled (running tests, background tasks etc).
# - The more involved case where extensions are enabled and have not changed since the last time Blender started.
# In this case do as little as possible since it runs on every startup; the following steps are unavoidable:
# - When reading the compatibility cache, run the following tests, regenerating when changes are detected:
# - Compare with the previous Blender version/platform.
# - Stat the manifests of all enabled extensions, testing that their modification-time and size are unchanged.
# - When any changes are detected,
# regenerate the compatibility information, which performs the more expensive operations
# (loading manifests, checking version ranges etc).
#
# Other notes:
#
# - This internal format may change at any point; regenerating the cache should be reasonably fast,
# but may introduce a small but noticeable pause on startup for user configurations that contain many extensions.
# - Failure to load simply ignores the file, which is regenerated as needed.
#
# Format:
#
# - The cache is a ZLIB-compressed, pickled Python dictionary.
# - The dictionary keys are as follows:
# `"blender": (bpy.app.version, platform.system(), platform.machine(), python_version, magic_number)`
# `"filesystem": [(repo_module, pkg_id, manifest_time, manifest_size), ...]`
# `"incompatible": {(repo_module, pkg_id): "Reason for being incompatible", ...}`
#
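# A sketch of the payload with made-up values, round-tripped through the helpers below:
#
#   cache_data = {
#       "blender": ((4, 2, 0), "Linux", "x86_64", (3, 11), 0),
#       "filesystem": [("user_default", "my_pkg", 1719800000.0, 512)],
#       "incompatible": {("user_default", "my_pkg"): "Blender version too old"},
#   }
#   _pickle_zlib_file_write(filepath, cache_data)
#   assert _pickle_zlib_file_read(filepath) == cache_data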
def _pickle_zlib_file_read(filepath):
import pickle
import gzip
with gzip.GzipFile(filepath, "rb") as fh:
data = pickle.load(fh)
return data
def _pickle_zlib_file_write(filepath, data) -> None:
import pickle
import gzip
with gzip.GzipFile(filepath, "wb", compresslevel=9) as fh:
pickle.dump(data, fh)
def _extension_repos_module_to_directory_map():
return {repo.module: repo.directory for repo in _preferences.extensions.repos if repo.enabled}
def _extension_compat_cache_update_needed(
cache_data, # `Dict[str, Any]`
blender_id, # `Tuple[Any, ...]`
extensions_enabled, # `Set[Tuple[str, str]]`
print_debug, # `Optional[Callable[[Any], None]]`
): # `-> bool`
# Detect when Blender itself changes.
if cache_data.get("blender") != blender_id:
if print_debug is not None:
print_debug("blender changed")
return True
# Detect when any of the extensions paths change.
cache_filesystem = cache_data.get("filesystem", [])
# Avoid touching the file-system if at all possible.
# When the length is the same and all cached IDs are in this set, we can be sure they are a 1:1 match.
if len(cache_filesystem) != len(extensions_enabled):
if print_debug is not None:
print_debug("length changes ({:d} -> {:d}).".format(len(cache_filesystem), len(extensions_enabled)))
return True
from os import stat
from os.path import join
repos_module_to_directory_map = _extension_repos_module_to_directory_map()
for repo_module, pkg_id, cache_stat_time, cache_stat_size in cache_filesystem:
if (repo_module, pkg_id) not in extensions_enabled:
if print_debug is not None:
print_debug("\"{:s}.{:s}\" no longer enabled.".format(repo_module, pkg_id))
return True
if repo_directory := repos_module_to_directory_map.get(repo_module, ""):
pkg_manifest_filepath = join(repo_directory, pkg_id, _ext_manifest_filename_toml)
else:
pkg_manifest_filepath = ""
# It's possible an extension has been set as an add-on but the repository it came from cannot be found.
# In this case behave as if the manifest can't be found (because it can't) instead of ignoring it.
# This is done because it's important to match.
if pkg_manifest_filepath:
try:
statinfo = stat(pkg_manifest_filepath)
except Exception:
statinfo = None
else:
statinfo = None
if statinfo is None:
test_time = 0
test_size = 0
else:
test_time = statinfo.st_mtime
test_size = statinfo.st_size
# Detect changes to any manifest file.
if cache_stat_time != test_time:
if print_debug is not None:
print_debug("\"{:s}.{:s}\" time changed ({:g} -> {:g}).".format(
repo_module, pkg_id, cache_stat_time, test_time,
))
return True
if cache_stat_size != test_size:
if print_debug is not None:
print_debug("\"{:s}.{:s}\" size changed ({:d} -> {:d}).".format(
repo_module, pkg_id, cache_stat_size, test_size,
))
return True
return False
# This function should not run on every startup, so it can afford to be slower,
# although users should not have to wait for it either.
def _extension_compat_cache_create(
blender_id, # `Tuple[Any, ...]`
extensions_enabled, # `Set[Tuple[str, str]]`
wheel_list, # `List[Tuple[str, List[str]]]`
print_debug, # `Optional[Callable[[Any], None]]`
): # `-> Dict[str, Any]`
import os
from os.path import join
filesystem = []
incompatible = {}
cache_data = {
"blender": blender_id,
"filesystem": filesystem,
"incompatible": incompatible,
}
repos_module_to_directory_map = _extension_repos_module_to_directory_map()
for repo_module, pkg_id in extensions_enabled:
if repo_directory := repos_module_to_directory_map.get(repo_module, ""):
pkg_manifest_filepath = join(repo_directory, pkg_id, _ext_manifest_filename_toml)
else:
pkg_manifest_filepath = ""
if print_debug is not None:
print_debug("directory for module \"{:s}\" not found!".format(repo_module))
if pkg_manifest_filepath:
try:
statinfo = os.stat(pkg_manifest_filepath)
except Exception:
statinfo = None
if print_debug is not None:
print_debug("unable to find \"{:s}\"".format(pkg_manifest_filepath))
else:
statinfo = None
if statinfo is None:
test_time = 0.0
test_size = 0.0
else:
test_time = statinfo.st_mtime
test_size = statinfo.st_size
# Store the reason for failure, to print when attempting to load.
from bl_pkg import manifest_compatible_with_wheel_data_or_error
if (error := manifest_compatible_with_wheel_data_or_error(
pkg_manifest_filepath,
repo_module,
pkg_id,
repo_directory,
wheel_list,
)) is not None:
incompatible[(repo_module, pkg_id)] = error
filesystem.append((repo_module, pkg_id, test_time, test_size))
return cache_data
def _initialize_extensions_compat_ensure_up_to_date(extensions_directory, extensions_enabled, print_debug):
import os
import platform
import sys
global _extensions_incompatible
updated = False
wheel_list = []
# Number to bump to change this format and force re-generation.
magic_number = 0
blender_id = (_bpy.app.version, platform.system(), platform.machine(), sys.version_info[0:2], magic_number)
filepath_compat = os.path.join(extensions_directory, ".cache", "compat.dat")
# Cache data contains a dict of:
# {
# "blender": (...)
# "paths": [path data to detect changes]
# "incompatible": {set of incompatible extensions}
# }
if os.path.exists(filepath_compat):
try:
cache_data = _pickle_zlib_file_read(filepath_compat)
except Exception as ex:
cache_data = None
# While this should not happen continuously (that would point to writing invalid cache),
# it is not a problem if there is some corruption with the cache and it needs to be re-generated.
# Show a message since this should be a rare occurrence - if it happens often it's likely to be a bug.
print("Extensions: reading cache failed ({:s}), creating...".format(str(ex)))
else:
cache_data = None
if print_debug is not None:
print_debug("doesn't exist, creating...")
if cache_data is not None:
# NOTE: the exception handling here is fairly paranoid and accounts for invalid values in the loaded cache.
# An example would be values expected to be lists/dictionaries being other types (e.g. None or strings).
# While this should not happen, a bad value should not prevent Blender from loading properly,
# so report the error and regenerate the cache.
try:
if _extension_compat_cache_update_needed(cache_data, blender_id, extensions_enabled, print_debug):
cache_data = None
except Exception as ex:
print("Extension: unexpected error reading cache, this is is a bug! (regenerating)")
import traceback
traceback.print_exc()
cache_data = None
if cache_data is None:
cache_data = _extension_compat_cache_create(blender_id, extensions_enabled, wheel_list, print_debug)
try:
os.makedirs(os.path.dirname(filepath_compat), exist_ok=True)
_pickle_zlib_file_write(filepath_compat, cache_data)
if print_debug is not None:
print_debug("update written to disk.")
except Exception as ex:
# Should be rare but should not cause this function to fail.
print("Extensions: writing cache failed ({:s}).".format(str(ex)))
# Set to true even when not written to disk as the run-time data *has* been updated;
# the cache will attempt to be generated the next time this is called.
updated = True
else:
if print_debug is not None:
print_debug("up to date.")
_extensions_incompatible = cache_data["incompatible"]
return updated, wheel_list
def _initialize_extensions_compat_ensure_up_to_date_wheels(extensions_directory, wheel_list):
import os
_extension_sync_wheels(
local_dir=os.path.join(extensions_directory, ".local"),
wheel_list=wheel_list,
)
def _initialize_extensions_compat_data(extensions_directory, ensure_wheels, addon_modules_pending):
# WARNING: this function must *never* raise an exception because it would interfere with low-level initialization.
# As the function deals with file IO, use what are typically over-zealous exception checks so as to rule out
# interfering with Blender loading properly in unexpected cases such as a full disk, a read-only file-system,
# or any other rare but possible scenarios.
_extensions_incompatible.clear()
# Create a set of all extension ID's.
extensions_enabled = set()
extensions_prefix_len = len(_ext_base_pkg_idname_with_dot)
for addon in _preferences.addons:
module_name = addon.module
if check_extension(module_name):
extensions_enabled.add(module_name[extensions_prefix_len:].partition(".")[0::2])
if addon_modules_pending is not None:
for module_name in addon_modules_pending:
if check_extension(module_name):
extensions_enabled.add(module_name[extensions_prefix_len:].partition(".")[0::2])
print_debug = (
(lambda *args, **kwargs: print("Extension version cache:", *args, **kwargs)) if _bpy.app.debug_python else
None
)
# Early exit, use for automated tests.
# Avoid (relatively) expensive file-system scanning if at all possible.
if not extensions_enabled:
if print_debug is not None:
print_debug("no extensions, skipping cache data.")
return
# While this isn't expected to fail, any failure here is a bug
# but it should not cause Blender's startup to fail.
try:
updated, wheel_list = _initialize_extensions_compat_ensure_up_to_date(
extensions_directory,
extensions_enabled,
print_debug,
)
except Exception as ex:
print("Extension: unexpected error detecting cache, this is is a bug!")
import traceback
traceback.print_exc()
updated = False
if ensure_wheels:
if updated:
try:
_initialize_extensions_compat_ensure_up_to_date_wheels(extensions_directory, wheel_list)
except Exception as ex:
print("Extension: unexpected error updating wheels, this is is a bug!")
import traceback
traceback.print_exc()
# -----------------------------------------------------------------------------
# Extension Utilities
@@ -1149,7 +1498,7 @@ def _initialize_extension_repos_post(*_, is_first=False):
modules._is_first = True
def _initialize_extensions_site_packages(*, create=False):
def _initialize_extensions_site_packages(*, extensions_directory, create=False):
# Add extension site-packages to `sys.path` (if it exists).
# Use for wheels.
import os
@@ -1161,7 +1510,7 @@ def _initialize_extensions_site_packages(*, create=False):
# so this can't simply be treated as a module directory unless those files would be excluded
# which may interfere with the wheels functionality.
site_packages = os.path.join(
_bpy.utils.user_resource('EXTENSIONS'),
extensions_directory,
".local",
"lib",
"python{:d}.{:d}".format(sys.version_info.major, sys.version_info.minor),
@@ -1205,8 +1554,13 @@ def _initialize_extensions_repos_once():
module_handle.register_module()
_ext_global.module_handle = module_handle
extensions_directory = _bpy.utils.user_resource('EXTENSIONS')
# Ensure extensions wheels can be loaded (when found).
_initialize_extensions_site_packages()
_initialize_extensions_site_packages(extensions_directory=extensions_directory)
# Ensure extension compatibility data has been loaded and matches the manifests.
_initialize_extensions_compat_data(extensions_directory, True, None)
# Setup repositories for the first time.
# Intentionally don't call `_initialize_extension_repos_pre` as this is the first time,
@@ -1216,3 +1570,16 @@ def _initialize_extensions_repos_once():
# Internal handlers intended for Blender's own handling of repositories.
_bpy.app.handlers._extension_repos_update_pre.append(_initialize_extension_repos_pre)
_bpy.app.handlers._extension_repos_update_post.append(_initialize_extension_repos_post)
# -----------------------------------------------------------------------------
# Extension Public API
def extensions_refresh(ensure_wheels=True, addon_modules_pending=None):
# Ensure any changes to extensions refresh `_extensions_incompatible`.
_initialize_extensions_compat_data(
_bpy.utils.user_resource('EXTENSIONS'),
ensure_wheels=ensure_wheels,
addon_modules_pending=addon_modules_pending,
)

@@ -451,7 +451,14 @@ class PREFERENCES_OT_addon_enable(Operator):
nonlocal err_str
err_str = str(ex)
mod = addon_utils.enable(self.module, default_set=True, handle_error=err_cb)
module_name = self.module
# Ensure any wheels are set up before enabling.
is_extension = addon_utils.check_extension(module_name)
if is_extension:
addon_utils.extensions_refresh(ensure_wheels=True, addon_modules_pending=[module_name])
mod = addon_utils.enable(module_name, default_set=True, handle_error=err_cb)
if mod:
bl_info = addon_utils.module_bl_info(mod)
@@ -474,6 +481,10 @@ class PREFERENCES_OT_addon_enable(Operator):
if err_str:
self.report({'ERROR'}, err_str)
if is_extension:
# Since the add-on didn't work, remove any wheels it may have installed.
addon_utils.extensions_refresh(ensure_wheels=True)
return {'CANCELLED'}
@@ -498,7 +509,11 @@ class PREFERENCES_OT_addon_disable(Operator):
err_str = traceback.format_exc()
print(err_str)
addon_utils.disable(self.module, default_set=True, handle_error=err_cb)
module_name = self.module
is_extension = addon_utils.check_extension(module_name)
addon_utils.disable(module_name, default_set=True, handle_error=err_cb)
if is_extension:
addon_utils.extensions_refresh(ensure_wheels=True)
if err_str:
self.report({'ERROR'}, err_str)

@@ -2385,10 +2385,8 @@ static int arg_handle_addons_set(int argc, const char **argv, void *data)
if (argc > 1) {
# ifdef WITH_PYTHON
const char script_str[] =
"from addon_utils import check, enable\n"
"for m in '%s'.split(','):\n"
" if check(m)[1] is False:\n"
" enable(m, persistent=True)";
"from _bpy_internal.addons.cli import set_from_cli\n"
"set_from_cli('%s')";
const int slen = strlen(argv[1]) + (sizeof(script_str) - 2);
char *str = static_cast<char *>(malloc(slen));
bContext *C = static_cast<bContext *>(data);