Module aiolirest.models.deployment_request

HPE Machine Learning Inference Software (MLIS/Aioli)

HPE MLIS is Aioli – The AI On-line Inference Platform that enables easy deployment, tracking, and serving of your packaged models regardless of your preferred AI framework.

The version of the OpenAPI document: 1.0.0
Contact: community@determined-ai
Generated by OpenAPI Generator (https://openapi-generator.tech)

Do not edit the class manually.

Source code
# coding: utf-8

"""
    HPE Machine Learning Inference Software (MLIS/Aioli)

    HPE MLIS is *Aioli* -- The AI On-line Inference Platform that enables easy deployment, tracking, and serving of your packaged models regardless of your preferred AI framework.

    The version of the OpenAPI document: 1.0.0
    Contact: community@determined-ai
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
"""  # noqa: E501


from __future__ import annotations
import pprint
import re  # noqa: F401
import json


from typing import Any, ClassVar, Dict, List, Optional
from pydantic import BaseModel, StrictInt, StrictStr
from pydantic import Field
from aiolirest.models.autoscaling import Autoscaling
from aiolirest.models.security import Security
try:
    from typing import Self
except ImportError:
    from typing_extensions import Self

class DeploymentRequest(BaseModel):
    """
    Deployment describes the deployment of a service.
    """ # noqa: E501
    arguments: Optional[List[StrictStr]] = Field(default=None, description="Arguments to be added to the service command line")
    auto_scaling: Optional[Autoscaling] = Field(default=None, alias="autoScaling")
    canary_traffic_percent: Optional[StrictInt] = Field(default=None, description="Percent traffic to pass to new model version", alias="canaryTrafficPercent")
    environment: Optional[Dict[str, StrictStr]] = Field(default=None, description="Environment variables added to the service")
    goal_status: Optional[StrictStr] = Field(default='Ready', description="Specifies the intended status to be achieved by the deployment.  Supported values are: * `Ready` - The inference service will be deployed to enable inference calls. * `Paused` - The inference service will be stopped and no longer accept calls.  The default is `Ready`.", alias="goalStatus")
    model: StrictStr = Field(description="PackagedModel name or ID to be deployed.")
    name: StrictStr = Field(description="The deployment name.  It must be a valid subdomain name and consist of lower case alphanumeric characters, '-' or '.', and must start and end with an alphanumeric character.")
    namespace: Optional[StrictStr] = Field(default=None, description="The Kubernetes namespace to be used for the deployment.")
    security: Optional[Security] = None
    __properties: ClassVar[List[str]] = ["arguments", "autoScaling", "canaryTrafficPercent", "environment", "goalStatus", "model", "name", "namespace", "security"]

    model_config = {
        "populate_by_name": True,
        "validate_assignment": True
    }


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Create an instance of DeploymentRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        _dict = self.model_dump(
            by_alias=True,
            exclude={
            },
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of auto_scaling
        if self.auto_scaling:
            _dict['autoScaling'] = self.auto_scaling.to_dict()
        # override the default output from pydantic by calling `to_dict()` of security
        if self.security:
            _dict['security'] = self.security.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Dict) -> Self:
        """Create an instance of DeploymentRequest from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "arguments": obj.get("arguments"),
            "autoScaling": Autoscaling.from_dict(obj.get("autoScaling")) if obj.get("autoScaling") is not None else None,
            "canaryTrafficPercent": obj.get("canaryTrafficPercent"),
            "environment": obj.get("environment"),
            "goalStatus": obj.get("goalStatus") if obj.get("goalStatus") is not None else 'Ready',
            "model": obj.get("model"),
            "name": obj.get("name"),
            "namespace": obj.get("namespace"),
            "security": Security.from_dict(obj.get("security")) if obj.get("security") is not None else None
        })
        return _obj

Classes

class DeploymentRequest (**data: Any)

Deployment describes the deployment of a service.

Create a new model by parsing and validating input data from keyword arguments.

Raises pydantic_core.ValidationError if the input data cannot be validated to form a valid model.

self is explicitly positional-only to allow self as a field name.
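
Example (illustrative only; the model and deployment names below are placeholders): constructing a request with keyword arguments. Because model_config enables populate_by_name, either the Python field names or the JSON aliases may be used.

from aiolirest.models.deployment_request import DeploymentRequest

request = DeploymentRequest(
    model="my-packaged-model",        # placeholder PackagedModel name or ID
    name="my-deployment",             # placeholder; must be a valid subdomain name
    namespace="default",
    environment={"LOG_LEVEL": "info"},
    canaryTrafficPercent=10,          # JSON alias accepted because populate_by_name is enabled
)
print(request.goal_status)            # 'Ready' (the default)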

Source code
class DeploymentRequest(BaseModel):
    """
    Deployment describes the deployment of a service.
    """ # noqa: E501
    arguments: Optional[List[StrictStr]] = Field(default=None, description="Arguments to be added to the service command line")
    auto_scaling: Optional[Autoscaling] = Field(default=None, alias="autoScaling")
    canary_traffic_percent: Optional[StrictInt] = Field(default=None, description="Percent traffic to pass to new model version", alias="canaryTrafficPercent")
    environment: Optional[Dict[str, StrictStr]] = Field(default=None, description="Environment variables added to the service")
    goal_status: Optional[StrictStr] = Field(default='Ready', description="Specifies the intended status to be achieved by the deployment.  Supported values are: * `Ready` - The inference service will be deployed to enable inference calls. * `Paused` - The inference service will be stopped and no longer accept calls.  The default is `Ready`.", alias="goalStatus")
    model: StrictStr = Field(description="PackagedModel name or ID to be deployed.")
    name: StrictStr = Field(description="The deployment name.  It must be a valid subdomain name and consist of lower case alphanumeric characters, '-' or '.', and must start and end with an alphanumeric character.")
    namespace: Optional[StrictStr] = Field(default=None, description="The Kubernetes namespace to be used for the deployment.")
    security: Optional[Security] = None
    __properties: ClassVar[List[str]] = ["arguments", "autoScaling", "canaryTrafficPercent", "environment", "goalStatus", "model", "name", "namespace", "security"]

    model_config = {
        "populate_by_name": True,
        "validate_assignment": True
    }


    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Create an instance of DeploymentRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        _dict = self.model_dump(
            by_alias=True,
            exclude={
            },
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of auto_scaling
        if self.auto_scaling:
            _dict['autoScaling'] = self.auto_scaling.to_dict()
        # override the default output from pydantic by calling `to_dict()` of security
        if self.security:
            _dict['security'] = self.security.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Dict) -> Self:
        """Create an instance of DeploymentRequest from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "arguments": obj.get("arguments"),
            "autoScaling": Autoscaling.from_dict(obj.get("autoScaling")) if obj.get("autoScaling") is not None else None,
            "canaryTrafficPercent": obj.get("canaryTrafficPercent"),
            "environment": obj.get("environment"),
            "goalStatus": obj.get("goalStatus") if obj.get("goalStatus") is not None else 'Ready',
            "model": obj.get("model"),
            "name": obj.get("name"),
            "namespace": obj.get("namespace"),
            "security": Security.from_dict(obj.get("security")) if obj.get("security") is not None else None
        })
        return _obj

Ancestors

  • pydantic.main.BaseModel

Class variables

var arguments : Optional[List[str]]
var auto_scaling : Optional[Autoscaling]
var canary_traffic_percent : Optional[int]
var environment : Optional[Dict[str, str]]
var goal_status : Optional[str]
var model : str
var model_computed_fields
var model_config
var model_fields
var name : str
var namespace : Optional[str]
var security : Optional[Security]
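
Example (illustrative; placeholder values): model_config sets validate_assignment to True, so assignments to these attributes are re-validated, and strict types such as StrictInt reject mismatched values.

from pydantic import ValidationError

from aiolirest.models.deployment_request import DeploymentRequest

req = DeploymentRequest(model="my-packaged-model", name="my-deployment")
req.canary_traffic_percent = 50        # accepted: a plain int satisfies StrictInt
try:
    req.canary_traffic_percent = "50"  # rejected: StrictInt does not coerce strings
except ValidationError as err:
    print(err)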

Static methods

def from_dict(obj: Dict) ‑> Self

Create an instance of DeploymentRequest from a dict

Source code
@classmethod
def from_dict(cls, obj: Dict) -> Self:
    """Create an instance of DeploymentRequest from a dict"""
    if obj is None:
        return None

    if not isinstance(obj, dict):
        return cls.model_validate(obj)

    _obj = cls.model_validate({
        "arguments": obj.get("arguments"),
        "autoScaling": Autoscaling.from_dict(obj.get("autoScaling")) if obj.get("autoScaling") is not None else None,
        "canaryTrafficPercent": obj.get("canaryTrafficPercent"),
        "environment": obj.get("environment"),
        "goalStatus": obj.get("goalStatus") if obj.get("goalStatus") is not None else 'Ready',
        "model": obj.get("model"),
        "name": obj.get("name"),
        "namespace": obj.get("namespace"),
        "security": Security.from_dict(obj.get("security")) if obj.get("security") is not None else None
    })
    return _obj
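
Example (illustrative; placeholder values): building an instance from a plain dict that uses the JSON alias keys. The nested autoScaling and security objects are omitted here because their fields are not documented on this page.

from aiolirest.models.deployment_request import DeploymentRequest

payload = {
    "model": "my-packaged-model",
    "name": "my-deployment",
    "canaryTrafficPercent": 25,
}
req = DeploymentRequest.from_dict(payload)
print(req.canary_traffic_percent)      # 25 -- the alias key maps onto the snake_case field
print(req.goal_status)                 # 'Ready' -- default applied when goalStatus is absent
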
def from_json(json_str: str) ‑> Self

Create an instance of DeploymentRequest from a JSON string

Source code
@classmethod
def from_json(cls, json_str: str) -> Self:
    """Create an instance of DeploymentRequest from a JSON string"""
    return cls.from_dict(json.loads(json_str))

Methods

def model_post_init(self: BaseModel, context: Any, /) ‑> None

This function is meant to behave like a BaseModel method to initialise private attributes.

It takes context as an argument since that's what pydantic-core passes when calling it.

Args

self
The BaseModel instance.
context
The context.
Source code
def init_private_attributes(self: BaseModel, context: Any, /) -> None:
    """This function is meant to behave like a BaseModel method to initialise private attributes.

    It takes context as an argument since that's what pydantic-core passes when calling it.

    Args:
        self: The BaseModel instance.
        context: The context.
    """
    if getattr(self, '__pydantic_private__', None) is None:
        pydantic_private = {}
        for name, private_attr in self.__private_attributes__.items():
            default = private_attr.get_default()
            if default is not PydanticUndefined:
                pydantic_private[name] = default
        object_setattr(self, '__pydantic_private__', pydantic_private)
def to_dict(self) ‑> Dict[str, Any]

Return the dictionary representation of the model using alias.

This has the following differences from calling pydantic's self.model_dump(by_alias=True):

  • None is only added to the output dict for nullable fields that were set at model initialization. Other fields with value None are ignored.
Source code
def to_dict(self) -> Dict[str, Any]:
    """Return the dictionary representation of the model using alias.

    This has the following differences from calling pydantic's
    `self.model_dump(by_alias=True)`:

    * `None` is only added to the output dict for nullable fields that
      were set at model initialization. Other fields with value `None`
      are ignored.
    """
    _dict = self.model_dump(
        by_alias=True,
        exclude={
        },
        exclude_none=True,
    )
    # override the default output from pydantic by calling `to_dict()` of auto_scaling
    if self.auto_scaling:
        _dict['autoScaling'] = self.auto_scaling.to_dict()
    # override the default output from pydantic by calling `to_dict()` of security
    if self.security:
        _dict['security'] = self.security.to_dict()
    return _dict
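
Example (illustrative; placeholder values): unset optional fields are dropped from the output rather than emitted as None, and the keys use the camelCase aliases.

from aiolirest.models.deployment_request import DeploymentRequest

req = DeploymentRequest(model="my-packaged-model", name="my-deployment")
print(req.to_dict())
# {'goalStatus': 'Ready', 'model': 'my-packaged-model', 'name': 'my-deployment'}
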
def to_json(self) ‑> str

Returns the JSON representation of the model using alias

Source code
def to_json(self) -> str:
    """Returns the JSON representation of the model using alias"""
    # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
    return json.dumps(self.to_dict())
def to_str(self) ‑> str

Returns the string representation of the model using alias

Source code
def to_str(self) -> str:
    """Returns the string representation of the model using alias"""
    return pprint.pformat(self.model_dump(by_alias=True))
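
Example (illustrative; placeholder values): a to_json/from_json round trip.

from aiolirest.models.deployment_request import DeploymentRequest

req = DeploymentRequest(model="my-packaged-model", name="my-deployment", namespace="default")
serialized = req.to_json()                         # a JSON string using the camelCase aliases
restored = DeploymentRequest.from_json(serialized)
assert restored == req                             # the set fields survive the round trip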