21 | 21 | from invokeai.app.util.step_callback import diffusion_step_callback
22 | 22 | from invokeai.backend.model_manager.config import (
23 | 23 |     AnyModelConfig,
   | 24 | +     ModelConfigBase,
24 | 25 | )
25 | 26 | from invokeai.backend.model_manager.load.load_base import LoadedModel, LoadedModelWithoutConfig
26 | 27 | from invokeai.backend.model_manager.taxonomy import AnyModel, BaseModelType, ModelFormat, ModelType, SubModelType

@@ -543,6 +544,30 @@ def load_remote_model(

543 | 544 |         self._util.signal_progress(f"Loading model {source}")
544 | 545 |         return self._services.model_manager.load.load_model_from_path(model_path=model_path, loader=loader)
545 | 546 |
    | 547 | +     def get_absolute_path(self, config_or_path: AnyModelConfig | Path | str) -> Path:
    | 548 | +         """Gets the absolute path for a given model config or path.
    | 549 | +
    | 550 | +         For example, if the model's path is `flux/main/FLUX Dev.safetensors`, and the models path is
    | 551 | +         `/home/username/InvokeAI/models`, this method will return
    | 552 | +         `/home/username/InvokeAI/models/flux/main/FLUX Dev.safetensors`.
    | 553 | +
    | 554 | +         Args:
    | 555 | +             config_or_path: The model config or path.
    | 556 | +
    | 557 | +         Returns:
    | 558 | +             The absolute path to the model.
    | 559 | +         """
    | 560 | +
    | 561 | +         model_path = Path(config_or_path.path) if isinstance(config_or_path, ModelConfigBase) else Path(config_or_path)
    | 562 | +
    | 563 | +         if model_path.is_absolute():
    | 564 | +             return model_path.resolve()
    | 565 | +
    | 566 | +         base_models_path = self._services.configuration.models_path
    | 567 | +         joined_path = base_models_path / model_path
    | 568 | +         resolved_path = joined_path.resolve()
    | 569 | +         return resolved_path
    | 570 | +
546 | 571 |
547 | 572 | class ConfigInterface(InvocationContextInterface):
548 | 573 |     def get(self) -> InvokeAIAppConfig:
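
For reviewers, here is a minimal, self-contained sketch of the resolution rule the new `get_absolute_path` method implements. It is not InvokeAI code: `resolve_model_path` and the example `models_dir` are illustrative stand-ins for the method and for `self._services.configuration.models_path`.

```python
from pathlib import Path


def resolve_model_path(models_dir: Path, model_path: str | Path) -> Path:
    """Mirror the rule in get_absolute_path: absolute paths pass through,
    relative paths are joined onto the models directory, then resolved."""
    p = Path(model_path)
    if p.is_absolute():
        return p.resolve()
    return (models_dir / p).resolve()


models_dir = Path("/home/username/InvokeAI/models")

# Relative path: joined onto the models directory.
print(resolve_model_path(models_dir, "flux/main/FLUX Dev.safetensors"))
# /home/username/InvokeAI/models/flux/main/FLUX Dev.safetensors (on a POSIX system)

# Absolute path: returned unchanged, modulo resolve().
print(resolve_model_path(models_dir, "/home/username/other/model.safetensors"))
```

In the app itself, the call pattern would presumably be `context.models.get_absolute_path(config)` or `context.models.get_absolute_path("flux/main/FLUX Dev.safetensors")` from within an invocation, assuming this method is exposed on the same models interface as the surrounding `load_remote_model`.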