wallaroo.deployment


class WaitForError(builtins.Exception):

Raised when waiting for a deployment to reach a target state fails. Carries an error message and, optionally, the deployment status at the time of failure.

WaitForError(message: str, status: Union[Dict[str, Any], NoneType])
Inherited Members
builtins.BaseException
with_traceback
args
class WaitForDeployError(builtins.RuntimeError):

Raised when a deployment fails while the SDK is waiting for it to deploy.

WaitForDeployError(message: str)
Inherited Members
builtins.BaseException
with_traceback
args
def hack_pandas_dataframe_order(df):
class Deployment(wallaroo.object.Object):

Base class for all backend GraphQL API objects.

This class serves as a framework for API objects to be constructed based on a partially-complete JSON response, and to fill in their remaining members dynamically if needed.

Deployment( client: Union[wallaroo.client.Client, NoneType], data: Dict[str, Any])

Base constructor.

Each object requires:

  • a GraphQL client - in order to fill its missing members dynamically
  • an initial data blob - typically from deserialized JSON, containing at least the data for required members (typically the object's primary key) and optionally other data members.
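
A minimal sketch of the construction pattern described above, assuming `wl_client` is an already-created wallaroo.client.Client; data blob fields beyond the primary key are omitted because missing members are filled in on demand.

    from wallaroo.deployment import Deployment

    # `wl_client` is assumed to be an existing wallaroo.client.Client instance.
    # The data blob only needs the primary key; other members are fetched
    # lazily over GraphQL the first time they are accessed.
    deployment = Deployment(client=wl_client, data={"id": 1})
    print(deployment.id())    # -> 1
    print(deployment.name())  # filled in dynamically from the backend
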
def id(self) -> int:
def name(*args, **kwargs):
def deployed(*args, **kwargs):
def model_configs(*args, **kwargs):
def pipeline_versions(*args, **kwargs):
def pipeline_name(*args, **kwargs):
def engine_config(*args, **kwargs):
def deploy(self) -> wallaroo.deployment.Deployment:

Deploys this deployment, if it is not already deployed.

If the deployment is already deployed, this is a no-op.

def undeploy(self) -> wallaroo.deployment.Deployment:

Shuts down this deployment, if it is deployed.

If the deployment is already undeployed, this is a no-op.
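
A brief usage sketch of the two lifecycle calls above, assuming `deployment` is an existing Deployment obtained elsewhere through the SDK; both calls return the deployment itself and are no-ops when it is already in the target state.

    deployment = deployment.deploy()    # start serving; no-op if already deployed
    deployment = deployment.undeploy()  # shut it down; no-op if already undeployed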

def status(self) -> Dict[str, Any]:

Returns a dict of deployment status useful for determining if a deployment has succeeded.

Returns

Dict of deployment internal state information.
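
A sketch of inspecting the returned dict; the exact keys are not documented here, so the "status" key and "Running" value checked below are assumptions for illustration.

    status = deployment.status()  # dict of internal deployment state
    # Key and value below are assumptions; inspect the dict your cluster
    # actually returns to see the real fields.
    if status.get("status") == "Running":
        print("deployment is up")
    else:
        print(status)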

def check_limit_status(self):
def wait_for_running( self, timeout: Union[int, NoneType] = None) -> wallaroo.deployment.Deployment:

Waits for the deployment status to enter the "Running" state.

Will wait up to "timeout_request" seconds for the deployment to enter that state. This is set in the "Client" object constructor. Will raise various exceptions on failure.

Returns

The deployment, for chaining.
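
A sketch of the common deploy-and-wait pattern; passing an explicit timeout (in seconds) and catching WaitForError are assumptions based on the signature above and on this module's exception classes.

    from wallaroo.deployment import WaitForError

    try:
        # Wait up to 300 seconds rather than the client-level timeout_request.
        deployment = deployment.deploy().wait_for_running(timeout=300)
    except WaitForError as err:
        # The exact exception types raised are not spelled out above;
        # catching WaitForError here is an assumption.
        print("deployment did not reach Running:", err)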

def wait_for_undeployed(self) -> wallaroo.deployment.Deployment:

Waits for the deployment to end.

Will wait up to "timeout_request" seconds for the deployment to finish shutting down. This is set in the "Client" object constructor. Will raise various exceptions on failure.

Returns

The deployment, for chaining.
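
The mirror-image pattern for tearing a deployment down and blocking until it has actually stopped:

    # Shut the deployment down and wait for it to finish.
    deployment = deployment.undeploy().wait_for_undeployed()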

def infer( self, tensor: Union[Dict[str, Any], List[Any], pandas.core.frame.DataFrame, pyarrow.lib.Table], timeout: Union[int, float, NoneType] = None, dataset: Union[List[str], NoneType] = None, dataset_exclude: Union[List[str], NoneType] = None, dataset_separator: Union[str, NoneType] = None):

Returns an inference result on this deployment, given a tensor.

Parameters
  • tensor: Union[Dict[str, Any], List[Any], pd.DataFrame, pa.Table]. The tensor to run inference on.
  • timeout: Optional[Union[int, float]]. The inference request times out after this many seconds; defaults to 15 seconds.
  • dataset: Optional[List[str]]. Defaults to ["*"], which returns ["time", "in", "out", "anomaly"]. Other available options: ["metadata"].
  • dataset_exclude: Optional[List[str]]. If set, excludes the listed parts of the dataset from the result.
  • dataset_separator: Optional[str]. If set to ".", the returned dataset is flattened.
Returns

InferenceResult in dictionary, dataframe or arrow format.
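
A hedged sketch of calling infer() with a pandas DataFrame; the "tensor" column name and its values are placeholders, since the real input schema depends on the model behind this deployment.

    import pandas as pd

    # Placeholder input; the real schema depends on the deployed model.
    df = pd.DataFrame({"tensor": [[1.0, 2.0, 3.0]]})

    result = deployment.infer(
        df,
        timeout=30,     # override the 15-second default
        dataset=["*"],  # time, in, out, anomaly (the default)
    )
    print(result)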

def infer_from_file( self, filename: Union[str, pathlib.Path], data_format: Union[str, NoneType] = None, timeout: Union[int, float, NoneType] = None, dataset: Union[List[str], NoneType] = None, dataset_exclude: Union[List[str], NoneType] = None, dataset_separator: Union[str, NoneType] = None) -> Union[List[wallaroo.inference_result.InferenceResult], pandas.core.frame.DataFrame, pyarrow.lib.Table]:

This method is used to run inference on a deployment using a file. The file can be an Apache Arrow file (.arrow) or a JSON file (.json) containing data either in the pandas records format or the Wallaroo custom JSON format.

Parameters
  • filename: Union[str, pathlib.Path]. The file containing the data to run inference on.
  • data_format: Optional[str]. The format of the data in the file. If not provided, the format is inferred from the file extension.
  • timeout: Optional[Union[int, float]]. The inference request times out after this many seconds; defaults to 15 seconds.
  • dataset: Optional[List[str]]. Defaults to ["*"], which returns ["time", "in", "out", "anomaly"]. Other available options: ["metadata"].
  • dataset_exclude: Optional[List[str]]. If set, excludes the listed parts of the dataset from the result.
  • dataset_separator: Optional[str]. If set to ".", the returned dataset is flattened.
Returns

InferenceResult in dictionary, dataframe or arrow format.
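
A sketch of running a batch file through the deployment; the file name is a placeholder, and the format is inferred from the extension unless data_format is passed explicitly.

    # "batch.arrow" is a placeholder path to an Apache Arrow file.
    results = deployment.infer_from_file("batch.arrow", timeout=60)
    print(results)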

async def async_infer( self, tensor: Union[Dict[str, Any], List[Any], pandas.core.frame.DataFrame, pyarrow.lib.Table], async_client: httpx.AsyncClient, timeout: Union[int, float, NoneType] = None, retries: Union[int, NoneType] = None, dataset: Union[List[str], NoneType] = None, dataset_exclude: Union[List[str], NoneType] = None, dataset_separator: Union[str, NoneType] = None):
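
async_infer() carries no docstring; based on its signature alone, a usage sketch might look like the following, with the input data, timeout, and retry count purely illustrative.

    import asyncio

    import httpx
    import pandas as pd

    async def run_async_inference(deployment):
        df = pd.DataFrame({"tensor": [[1.0, 2.0, 3.0]]})  # placeholder input
        async with httpx.AsyncClient() as async_client:
            # Argument values are illustrative; only the signature above is known.
            return await deployment.async_infer(df, async_client, timeout=30, retries=2)

    # result = asyncio.run(run_async_inference(deployment))
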
def replace_model( self, model_version: wallaroo.model_version.ModelVersion) -> wallaroo.deployment.Deployment:

Replaces the current model with a default-configured Model.

Parameters
  • ModelVersion model_version: Model variant to replace the current model with.
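
A one-line sketch, assuming `model_version` is a wallaroo.model_version.ModelVersion uploaded or retrieved elsewhere with the client:

    # Swap in the new model version using its default configuration.
    deployment = deployment.replace_model(model_version)
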
def replace_configured_model( self, model_config: wallaroo.model_config.ModelConfig) -> wallaroo.deployment.Deployment:

Replaces the current model with a configured variant.

Parameters
  • ModelConfig model_config: Configured model to replace the current model with.
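
The equivalent call when an explicit configuration is available, assuming `model_config` is a wallaroo.model_config.ModelConfig built elsewhere:

    # Swap in the model variant together with its configuration.
    deployment = deployment.replace_configured_model(model_config)
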
def internal_url(self) -> str:

Returns the internal inference URL, which is only reachable from inside the Wallaroo cluster by SDK instances deployed in the cluster.

If both pipelines and models are configured on the Deployment, this gives preference to pipelines. The returned URL is always for the first configured pipeline or model.

def url(self) -> str:

Returns the inference URL.

If both pipelines and models are configured on the Deployment, this gives preference to pipelines. The returned URL is always for the first configured pipeline or model.
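
A small sketch contrasting the two URL accessors; which one applies depends on whether the caller runs inside the Wallaroo cluster.

    # General inference endpoint returned by url().
    print(deployment.url())
    # Endpoint reachable only from inside the Wallaroo cluster.
    print(deployment.internal_url())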

def logs( self, limit: int = 100, valid: Union[bool, NoneType] = None) -> wallaroo.logs.LogEntries:

Deployment.logs() has been removed. Please use pipeline.logs() instead.