from __future__ import annotations

import pickle
from abc import ABC, abstractmethod
from pathlib import Path
from shutil import rmtree
from typing import Any

import onnxruntime as ort
from huggingface_hub import snapshot_download
from typing_extensions import Buffer

from ..config import get_cache_dir, get_hf_model_name, log, settings
from ..schemas import ModelType


class InferenceModel(ABC):
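    """Abstract base for ONNX Runtime-backed inference models: handles weight
    download, cache management, and lazy loading. Concrete subclasses declare
    ``_model_type`` and implement ``_load`` and ``_predict``.
    """
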
    _model_type: ModelType

    def __init__(
        self,
        model_name: str,
        cache_dir: Path | str | None = None,
        inter_op_num_threads: int = settings.model_inter_op_threads,
        intra_op_num_threads: int = settings.model_intra_op_threads,
        **model_kwargs: Any,
    ) -> None:
        self.model_name = model_name
        self.loaded = False
        self._cache_dir = Path(cache_dir) if cache_dir is not None else None
        self.providers = model_kwargs.pop("providers", ["CPUExecutionProvider"])
        # don't pre-allocate more memory than needed
        self.provider_options = model_kwargs.pop(
            "provider_options", [{"arena_extend_strategy": "kSameAsRequested"}] * len(self.providers)
        )
        log.debug(
            f"Setting '{self.model_name}' execution providers to {self.providers} "
            "in descending order of preference"
        )
        log.debug(f"Setting execution provider options to {self.provider_options}")
        self.sess_options = PicklableSessionOptions()
        # avoid thread contention between models
        if inter_op_num_threads > 1:
            self.sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL

        log.debug(f"Setting execution_mode to {self.sess_options.execution_mode.name}")
        log.debug(f"Setting inter_op_num_threads to {inter_op_num_threads}")
        log.debug(f"Setting intra_op_num_threads to {intra_op_num_threads}")
        self.sess_options.inter_op_num_threads = inter_op_num_threads
        self.sess_options.intra_op_num_threads = intra_op_num_threads
        self.sess_options.enable_cpu_mem_arena = False

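    # Fetch the model weights from the Hugging Face Hub unless the local cache is already populated.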
    def download(self) -> None:
        if not self.cached:
            log.info(
                f"Downloading {self.model_type.replace('-', ' ')} model '{self.model_name}'. "
                "This may take a while."
            )
            self._download()

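    # Lazy initialization: ensure the weights are downloaded, then let the subclass build its session.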
    def load(self) -> None:
        if self.loaded:
            return
        self.download()
        log.info(f"Loading {self.model_type.replace('-', ' ')} model '{self.model_name}'")
        self._load()
        self.loaded = True

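    # Public inference entry point: loads the model on first use and applies any per-call options.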
    def predict(self, inputs: Any, **model_kwargs: Any) -> Any:
        self.load()
        if model_kwargs:
            self.configure(**model_kwargs)
        return self._predict(inputs)

    @abstractmethod
    def _predict(self, inputs: Any) -> Any:
        ...

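    # Optional hook for per-request options; the default implementation is a no-op.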
    def configure(self, **model_kwargs: Any) -> None:
        pass

    def _download(self) -> None:
        snapshot_download(
            get_hf_model_name(self.model_name),
            cache_dir=self.cache_dir,
            local_dir=self.cache_dir,
            local_dir_use_symlinks=False,
        )

    @abstractmethod
    def _load(self) -> None:
        ...

    @property
    def model_type(self) -> ModelType:
        return self._model_type

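    # An explicitly configured cache directory wins; otherwise derive one from the model name and type.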
    @property
    def cache_dir(self) -> Path:
        return self._cache_dir if self._cache_dir is not None else get_cache_dir(self.model_name, self.model_type)

    @cache_dir.setter
    def cache_dir(self, cache_dir: Path) -> None:
        self._cache_dir = cache_dir

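    # The model counts as cached only if its cache directory exists and is non-empty.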
    @property
    def cached(self) -> bool:
        return self.cache_dir.exists() and any(self.cache_dir.iterdir())

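    # Factory that dispatches on the _model_type declared by each InferenceModel subclass.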
    @classmethod
    def from_model_type(cls, model_type: ModelType, model_name: str, **model_kwargs: Any) -> InferenceModel:
        subclasses = {subclass._model_type: subclass for subclass in cls.__subclasses__()}
        if model_type not in subclasses:
            raise ValueError(f"Unsupported model type: {model_type}")

        return subclasses[model_type](model_name, **model_kwargs)

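    # Delete the cache directory, refusing to run where rmtree is vulnerable to symlink attacks.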
    def clear_cache(self) -> None:
        if not self.cache_dir.exists():
            log.warn(
                f"Attempted to clear cache for model '{self.model_name}', but cache directory does not exist.",
            )
            return
        if not rmtree.avoids_symlink_attacks:
            raise RuntimeError("Attempted to clear cache, but rmtree is not safe on this platform.")

        if self.cache_dir.is_dir():
            rmtree(self.cache_dir)
            log.info(f"Cleared cache directory for model '{self.model_name}'.")
        else:
            log.warn(
                "Encountered file instead of directory at cache path "
                f"for '{self.model_name}'. Removing file and replacing with a directory.",
            )
            self.cache_dir.unlink()
            self.cache_dir.mkdir(parents=True, exist_ok=True)


# HF deep copies configs, so we need to make session options picklable
class PicklableSessionOptions(ort.SessionOptions):  # type: ignore[misc]
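    # Pickle every non-callable attribute; ort.SessionOptions itself is not directly picklable.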
    def __getstate__(self) -> bytes:
        return pickle.dumps([(attr, getattr(self, attr)) for attr in dir(self) if not callable(getattr(self, attr))])

    def __setstate__(self, state: Buffer) -> None:
        self.__init__()  # type: ignore[misc]
        attrs: list[tuple[str, Any]] = pickle.loads(state)
        for attr, val in attrs:
            setattr(self, attr, val)
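

# Illustrative sketch only (not part of this module): a hypothetical subclass showing how the
# InferenceModel contract above is meant to be used. The class name, ModelType member, model
# name, and ONNX file name below are assumptions for the example, not values from this codebase.
#
#     class ExampleClipModel(InferenceModel):
#         _model_type = ModelType.CLIP  # assumed member
#
#         def _load(self) -> None:
#             self.session = ort.InferenceSession(
#                 str(self.cache_dir / "model.onnx"),  # assumed file layout
#                 sess_options=self.sess_options,
#                 providers=self.providers,
#                 provider_options=self.provider_options,
#             )
#
#         def _predict(self, inputs: Any) -> Any:
#             return self.session.run(None, inputs)
#
#     model = InferenceModel.from_model_type(ModelType.CLIP, "example-model-name")
#     outputs = model.predict(preprocessed_inputs)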