chore(deps): update machine-learning (#6302)

* chore(deps): update machine-learning

* fix typing, use new lifespan syntax

* wrap in try / finally

* move log

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
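
The "use new lifespan syntax" and "wrap in try / finally" items refer to FastAPI's lifespan context manager, which replaces the deprecated @app.on_event("startup") / @app.on_event("shutdown") hooks. A minimal sketch of that pattern follows; the placeholder resource is illustrative and not the actual immich main.py change from this commit:

from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # startup: acquire whatever the app needs (hypothetical placeholder resource)
    resource = {"ready": True}
    try:
        yield  # the application serves requests while suspended here
    finally:
        # shutdown: runs even if startup-dependent code fails later, mirroring
        # the "wrap in try / finally" item in the commit message
        resource.clear()


app = FastAPI(lifespan=lifespan)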
Author: renovate[bot]
Date: 2024-01-13 05:00:09 +00:00
Committed by: GitHub
Parent: bd5ae9f31e
Commit: 20be42cec0
12 changed files with 238 additions and 212 deletions


@@ -1,13 +1,10 @@
 from enum import StrEnum
-from typing import Any, Protocol, TypeAlias, TypedDict, TypeGuard
+from typing import Any, Protocol, TypedDict, TypeGuard
 import numpy as np
+import numpy.typing as npt
 from pydantic import BaseModel
-ndarray_f32: TypeAlias = np.ndarray[int, np.dtype[np.float32]]
-ndarray_i64: TypeAlias = np.ndarray[int, np.dtype[np.int64]]
-ndarray_i32: TypeAlias = np.ndarray[int, np.dtype[np.int32]]
 class TextResponse(BaseModel):
     __root__: str
@@ -35,7 +32,7 @@ class HasProfiling(Protocol):
 class Face(TypedDict):
     boundingBox: BoundingBox
-    embedding: ndarray_f32
+    embedding: npt.NDArray[np.float32]
     imageWidth: int
     imageHeight: int
     score: float
@@ -43,3 +40,7 @@ class Face(TypedDict):
 def has_profiling(obj: Any) -> TypeGuard[HasProfiling]:
     return hasattr(obj, "profiling") and isinstance(obj.profiling, dict)
+
+
+def is_ndarray(obj: Any, dtype: "type[np._DTypeScalar_co]") -> "TypeGuard[npt.NDArray[np._DTypeScalar_co]]":
+    return isinstance(obj, np.ndarray) and obj.dtype == dtype
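
The typing change above swaps the hand-rolled ndarray_f32 / ndarray_i64 / ndarray_i32 aliases for numpy.typing's npt.NDArray and adds an is_ndarray TypeGuard. A small usage sketch of the new guard follows; the calling function and the app.schemas import path are assumptions, not code from this commit:

from typing import Any

import numpy as np
import numpy.typing as npt

from app.schemas import is_ndarray  # module path assumed


def as_float32_embedding(raw: Any) -> npt.NDArray[np.float32]:
    # is_ndarray is a TypeGuard: inside this branch static checkers treat
    # `raw` as npt.NDArray[np.float32] instead of Any, so no cast is needed.
    if is_ndarray(raw, np.float32):
        return raw
    raise TypeError(f"expected a float32 ndarray, got {type(raw)}")


embedding = as_float32_embedding(np.zeros(512, dtype=np.float32))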