Skip to content

Package

model_navigator.package.package

Package module - structure to snapshot optimization result.

Package

Package(status, workspace, model=None)

Class for storing pipeline execution status.

Initialize object.

Parameters:

  • status (Status) –

    A navigator execution status

  • workspace (Workspace) –

    Workspace for package files

  • model (Optional[object], default: None ) –

    An optional model

Source code in model_navigator/package/package.py
def __init__(self, status: Status, workspace: Workspace, model: Optional[object] = None):
    """Create a package snapshot around a pipeline execution status.

    Args:
        status: Navigator execution status to store.
        workspace: Workspace holding the package files.
        model: Optional source model associated with the package.
    """
    self._model = model
    self.workspace = workspace
    self.status = status

config property

config: CommonConfig

Generate configuration from package.

Returns:

  • CommonConfig

    The configuration object

framework property

framework: Framework

Framework for which package was created.

Returns:

  • Framework

    Framework object for package

model property

model: object

Return source model.

Returns:

  • object –

    The source model
_create_status_file

_create_status_file()

Create a status.yaml file for package.

Source code in model_navigator/package/package.py
def _create_status_file(self) -> None:
    """Serialize the package status into a status.yaml file in the workspace."""
    status_path = self.workspace.path / self.status_filename

    # Drop the fields that cannot (or should not) be serialized to YAML.
    serializable_config = DataObject.filter_data(
        data=self.status.config,
        filter_fields=["model", "dataloader", "verify_func", "workspace"],
    )
    serializable_config = DataObject.parse_data(serializable_config)

    # Shallow-copy so the in-memory status keeps its original config object.
    status_snapshot = copy.copy(self.status)
    status_snapshot.config = serializable_config

    serialized = status_snapshot.to_dict(parse=True)
    with status_path.open("w") as status_file:
        yaml.safe_dump(serialized, status_file, sort_keys=False)

_delete_status_file

_delete_status_file()

Delete the status.yaml file from package.

Source code in model_navigator/package/package.py
def _delete_status_file(self):
    """Remove the status.yaml file from the package, if present."""
    # missing_ok makes the removal a no-op when the file does not exist.
    (self.workspace.path / self.status_filename).unlink(missing_ok=True)

_get_custom_configs

_get_custom_configs(custom_configs)

Build custom configs from config data.

Parameters:

  • custom_configs (Dict[str, Union[Dict, CustomConfigForFormat]]) –

    Dictionary with custom configs data
Returns:

  • Dict

    Dictionary with mapped config objects

Source code in model_navigator/package/package.py
def _get_custom_configs(self, custom_configs: Dict[str, Union[Dict, CustomConfigForFormat]]) -> Dict:
    """Build custom configs from config data.

    Entries provided as plain dictionaries are converted to their dedicated
    custom config class; already constructed objects are passed through as-is.

    Args:
        custom_configs: Dictionary with custom configs data

    Returns:
        Dictionary mapping config class name to the mapped config object
    """
    custom_configs_mapped = {}
    for class_name, obj in custom_configs.items():
        if isinstance(obj, dict):
            # Resolve the concrete config class by its name and build it from raw data.
            custom_config_class = CUSTOM_CONFIGS_MAPPING[class_name]
            obj = custom_config_class.from_dict(obj)  # pytype: disable=not-instantiable

        custom_configs_mapped[class_name] = obj

    return custom_configs_mapped

_get_runner

_get_runner(model_key, runner_name, return_type=TensorType.NUMPY)

Load runner.

Parameters:

  • model_key (str) –

    Unique key of the model.

  • runner_name (str) –

    Name of the runner.

  • return_type (TensorType, default: NUMPY ) –

    Type of the runner output.

Returns:

  • NavigatorRunner

    NavigatorRunner object

Source code in model_navigator/package/package.py
def _get_runner(
    self,
    model_key: str,
    runner_name: str,
    return_type: TensorType = TensorType.NUMPY,
) -> NavigatorRunner:
    """Instantiate a runner for the given model key and runner name.

    Args:
        model_key: Unique key of the model.
        runner_name: Name of the runner.
        return_type: Type of the runner output.

    Raises:
        ModelNavigatorNotFoundError: When no model matches ``model_key``.

    Returns:
        NavigatorRunner object
    """
    try:
        model_config = self.status.models_status[model_key].model_config
    except KeyError:
        raise ModelNavigatorNotFoundError(f"Model {model_key} not found.")

    # Source formats run against the in-memory model; others against a file path.
    model_source = (
        self._model
        if is_source_format(model_config.format)
        else self.workspace.path / model_config.path
    )
    runner_cls = get_runner(runner_name)
    return runner_cls(  # pytype: disable=not-instantiable
        model=model_source,
        input_metadata=self.status.input_metadata,
        output_metadata=self.status.output_metadata,
        return_type=return_type,
    )

get_best_model_status

get_best_model_status(strategy=None, include_source=True)

Returns ModelStatus of best model for given strategy.

Parameters:

  • strategy (Optional[RuntimeSearchStrategy], default: None ) –

    Strategy for finding the best runtime. Defaults to MaxThroughputAndMinLatencyStrategy.

  • include_source (bool, default: True ) –

    Flag if Python based model has to be included in analysis

Returns:

  • ModelStatus

    ModelStatus of best model for given strategy or None.

Source code in model_navigator/package/package.py
def get_best_model_status(
    self,
    strategy: Optional[RuntimeSearchStrategy] = None,
    include_source: bool = True,
) -> ModelStatus:
    """Return the ModelStatus of the best model for the given strategy.

    Args:
        strategy: Strategy for finding the best runtime. Defaults to `MaxThroughputAndMinLatencyStrategy`.
        include_source: Flag if Python based model has to be included in analysis

    Returns:
        ModelStatus of best model for given strategy or None.
    """
    best_runtime = self._get_best_runtime(strategy=strategy, include_source=include_source)
    return best_runtime.model_status

get_model_path

get_model_path(model_key)

Return path of the model.

Parameters:

  • model_key (str) –

    Unique key of the model.

Raises:

  • ModelNavigatorNotFoundError

    When model not found.

Returns:

  • Path –

    model path

Source code in model_navigator/package/package.py
def get_model_path(self, model_key: str) -> pathlib.Path:
    """Return the on-disk path of a model inside the package workspace.

    Args:
        model_key: Unique key of the model.

    Raises:
        ModelNavigatorNotFoundError: When model not found.

    Returns:
        Path: model path
    """
    models_status = self.status.models_status
    try:
        relative_path = models_status[model_key].model_config.path
    except KeyError:
        raise ModelNavigatorNotFoundError(f"Model {model_key} not found.")
    return self.workspace.path / relative_path

get_runner

get_runner(strategy=None, include_source=True, return_type=TensorType.NUMPY)

Get the runner according to the strategy.

Parameters:

  • strategy (Optional[RuntimeSearchStrategy], default: None ) –

    Strategy for finding the best runtime. Defaults to MaxThroughputAndMinLatencyStrategy.

  • include_source (bool, default: True ) –

    Flag if Python based model has to be included in analysis

  • return_type (TensorType, default: NUMPY ) –

    The type of the output tensor. Defaults to TensorType.NUMPY. If the return_type supports CUDA tensors (e.g. TensorType.TORCH) and the input tensors are on CUDA, there will be no additional data transfer between CPU and GPU.

Returns:

  • NavigatorRunner

    The optimal runner for the optimized model.

Source code in model_navigator/package/package.py
def get_runner(
    self,
    strategy: Optional[RuntimeSearchStrategy] = None,
    include_source: bool = True,
    return_type: TensorType = TensorType.NUMPY,
) -> NavigatorRunner:
    """Build a runner for the best model selected by the given strategy.

    Args:
        strategy: Strategy for finding the best runtime. Defaults to `MaxThroughputAndMinLatencyStrategy`.
        include_source: Flag if Python based model has to be included in analysis
        return_type: The type of the output tensor. Defaults to `TensorType.NUMPY`.
            If the return_type supports CUDA tensors (e.g. TensorType.TORCH) and the input tensors are on CUDA,
            there will be no additional data transfer between CPU and GPU.

    Raises:
        ModelNavigatorMissingSourceModelError: When the best runner needs the source
            model but none has been loaded into the package.
        ModelNavigatorNotFoundError: When the best runner's model file is missing
            from the package workspace.

    Returns:
        The optimal runner for the optimized model.
    """
    best_runtime = self._get_best_runtime(strategy=strategy, include_source=include_source)
    model_config = best_runtime.model_status.model_config
    runner_status = best_runtime.runner_status

    # The two guards below are disjoint: a model is either source-based or file-based.
    if is_source_format(model_config.format):
        if self._model is None:
            raise ModelNavigatorMissingSourceModelError(
                "The best runner uses the source model but it is not available in the loaded package. "
                "Please load the source model with `package.load_source_model(model)` "
                "or exclude source model from optimal runner search "
                "with `package.get_runner(include_source=False)`."
            )
    elif not (self.workspace.path / model_config.path).exists():
        raise ModelNavigatorNotFoundError(
            f"The best runner expects {model_config.format.value!r} "
            "model but it is not available in the loaded package."
        )

    return self._get_runner(model_config.key, runner_status.runner_name, return_type=return_type)

is_empty

is_empty()

Validate if package is empty - no models were produced.

Returns:

  • bool

    True if empty package, False otherwise.

Source code in model_navigator/package/package.py
def is_empty(self) -> bool:
    """Check whether the package is empty - no models were produced.

    Returns:
        True if empty package, False otherwise.
    """
    for model_status in self.status.models_status.values():
        model_config = model_status.model_config
        # Source-format models live in memory, not in the package.
        if is_source_format(model_config.format):
            continue
        model_dir = self.workspace.path / model_config.path.parent
        for runner_status in model_status.runners_status.values():
            correctness_ok = runner_status.status.get(Correctness.__name__) == CommandStatus.OK
            performance_not_failed = runner_status.status.get(Performance.__name__) != CommandStatus.FAIL
            if correctness_ok and performance_not_failed and model_dir.exists():
                return False
    return True

load_source_model

load_source_model(model)

Load model defined in Python code.

Parameters:

  • model (object) –

    A model object

Source code in model_navigator/package/package.py
def load_source_model(self, model: object) -> None:
    """Load model defined in Python code.

    Args:
        model: A model object to attach as the package source model.
    """
    replacing_existing = self._model is not None
    if replacing_existing:
        LOGGER.warning("Overriding existing source model.")
    self._model = model

save_status_file

save_status_file()

Save the status.yaml.

Source code in model_navigator/package/package.py
def save_status_file(self) -> None:
    """Persist the current status by rewriting the status.yaml file."""
    # Recreate the file from scratch so stale content never survives.
    self._delete_status_file()
    self._create_status_file()