# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Dict, List, Optional, Union
import msrest.serialization
from ._batch_ai_enums import *


class AppInsightsReference(msrest.serialization.Model):
"""Azure Application Insights information for performance counters reporting.
All required parameters must be populated in order to send to Azure.
:param component: Required. Azure Application Insights component resource ID.
:type component: ~batch_ai.models.ResourceId
:param instrumentation_key: Value of the Azure Application Insights instrumentation key.
:type instrumentation_key: str
    :param instrumentation_key_secret_reference: KeyVault store and secret which contains the
     Azure Application Insights instrumentation key. One of instrumentationKey or
instrumentationKeySecretReference must be specified.
:type instrumentation_key_secret_reference: ~batch_ai.models.KeyVaultSecretReference
"""
_validation = {
'component': {'required': True},
}
_attribute_map = {
'component': {'key': 'component', 'type': 'ResourceId'},
'instrumentation_key': {'key': 'instrumentationKey', 'type': 'str'},
'instrumentation_key_secret_reference': {'key': 'instrumentationKeySecretReference', 'type': 'KeyVaultSecretReference'},
}
def __init__(
self,
*,
component: "ResourceId",
instrumentation_key: Optional[str] = None,
instrumentation_key_secret_reference: Optional["KeyVaultSecretReference"] = None,
**kwargs
):
super(AppInsightsReference, self).__init__(**kwargs)
self.component = component
self.instrumentation_key = instrumentation_key
self.instrumentation_key_secret_reference = instrumentation_key_secret_reference
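

# Usage sketch (editor's illustration, not generated code): performance
# counters can report to Application Insights either via a raw instrumentation
# key or via a Key Vault reference. Assumes `ResourceId(id=...)` and
# `KeyVaultSecretReference(source_vault=..., secret_url=...)` as defined
# elsewhere in this models module; all IDs below are placeholders.
def _example_app_insights_reference() -> "AppInsightsReference":
    return AppInsightsReference(
        component=ResourceId(
            id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
               "microsoft.insights/components/<component>"),
        instrumentation_key_secret_reference=KeyVaultSecretReference(
            source_vault=ResourceId(
                id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                   "Microsoft.KeyVault/vaults/<vault>"),
            secret_url="https://<vault>.vault.azure.net/secrets/<secret>",
        ),
    )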


class AutoScaleSettings(msrest.serialization.Model):
"""Auto-scale settings for the cluster. The system automatically scales the cluster up and down (within minimumNodeCount and maximumNodeCount) based on the number of queued and running jobs assigned to the cluster.
All required parameters must be populated in order to send to Azure.
:param minimum_node_count: Required. The minimum number of compute nodes the Batch AI service
will try to allocate for the cluster. Note, the actual number of nodes can be less than the
     specified value if the subscription does not have enough quota to fulfill the request.
:type minimum_node_count: int
:param maximum_node_count: Required. The maximum number of compute nodes the cluster can have.
:type maximum_node_count: int
:param initial_node_count: The number of compute nodes to allocate on cluster creation. Note
that this value is used only during cluster creation. Default: 0.
:type initial_node_count: int
"""
_validation = {
'minimum_node_count': {'required': True},
'maximum_node_count': {'required': True},
}
_attribute_map = {
'minimum_node_count': {'key': 'minimumNodeCount', 'type': 'int'},
'maximum_node_count': {'key': 'maximumNodeCount', 'type': 'int'},
'initial_node_count': {'key': 'initialNodeCount', 'type': 'int'},
}
def __init__(
self,
*,
minimum_node_count: int,
maximum_node_count: int,
initial_node_count: Optional[int] = 0,
**kwargs
):
super(AutoScaleSettings, self).__init__(**kwargs)
self.minimum_node_count = minimum_node_count
self.maximum_node_count = maximum_node_count
self.initial_node_count = initial_node_count
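

# Usage sketch (editor's illustration): auto-scale settings that let the
# cluster shrink to zero nodes when no jobs are queued or running and grow to
# at most eight nodes under load.
def _example_auto_scale_settings() -> "AutoScaleSettings":
    return AutoScaleSettings(
        minimum_node_count=0,  # cluster can scale all the way down when idle
        maximum_node_count=8,  # hard upper bound on cluster size
        initial_node_count=1,  # only honored at cluster creation time
    )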


class AzureBlobFileSystemReference(msrest.serialization.Model):
"""Azure Blob Storage Container mounting configuration.
All required parameters must be populated in order to send to Azure.
:param account_name: Required. Name of the Azure storage account.
:type account_name: str
:param container_name: Required. Name of the Azure Blob Storage container to mount on the
cluster.
:type container_name: str
:param credentials: Required. Information about the Azure storage credentials.
:type credentials: ~batch_ai.models.AzureStorageCredentialsInfo
    :param relative_mount_path: Required. The relative path on the compute node where the Azure
     Blob container will be mounted. Note that all cluster level containers will be mounted under
$AZ_BATCHAI_MOUNT_ROOT location and all job level containers will be mounted under
$AZ_BATCHAI_JOB_MOUNT_ROOT.
:type relative_mount_path: str
    :param mount_options: Mount options for mounting the blobfuse file system.
:type mount_options: str
"""
_validation = {
'account_name': {'required': True},
'container_name': {'required': True},
'credentials': {'required': True},
'relative_mount_path': {'required': True},
}
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'container_name': {'key': 'containerName', 'type': 'str'},
'credentials': {'key': 'credentials', 'type': 'AzureStorageCredentialsInfo'},
'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
'mount_options': {'key': 'mountOptions', 'type': 'str'},
}
def __init__(
self,
*,
account_name: str,
container_name: str,
credentials: "AzureStorageCredentialsInfo",
relative_mount_path: str,
mount_options: Optional[str] = None,
**kwargs
):
super(AzureBlobFileSystemReference, self).__init__(**kwargs)
self.account_name = account_name
self.container_name = container_name
self.credentials = credentials
self.relative_mount_path = relative_mount_path
self.mount_options = mount_options
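

# Usage sketch (editor's illustration): mount a blob container with blobfuse
# under $AZ_BATCHAI_MOUNT_ROOT/data on every cluster node. Account name, key
# and container are placeholders; `AzureStorageCredentialsInfo` is defined
# below in this module.
def _example_blob_file_system_reference() -> "AzureBlobFileSystemReference":
    return AzureBlobFileSystemReference(
        account_name="<storage-account>",
        container_name="<container>",
        credentials=AzureStorageCredentialsInfo(account_key="<account-key>"),
        relative_mount_path="data",  # cluster level => $AZ_BATCHAI_MOUNT_ROOT/data
    )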


class AzureFileShareReference(msrest.serialization.Model):
"""Azure File Share mounting configuration.
All required parameters must be populated in order to send to Azure.
:param account_name: Required. Name of the Azure storage account.
:type account_name: str
    :param azure_file_url: Required. URL to access the Azure File share.
:type azure_file_url: str
:param credentials: Required. Information about the Azure storage credentials.
:type credentials: ~batch_ai.models.AzureStorageCredentialsInfo
:param relative_mount_path: Required. The relative path on the compute node where the Azure
File share will be mounted. Note that all cluster level file shares will be mounted under
$AZ_BATCHAI_MOUNT_ROOT location and all job level file shares will be mounted under
$AZ_BATCHAI_JOB_MOUNT_ROOT.
:type relative_mount_path: str
:param file_mode: File mode for files on the mounted file share. Default value: 0777.
:type file_mode: str
:param directory_mode: File mode for directories on the mounted file share. Default value:
0777.
:type directory_mode: str
"""
_validation = {
'account_name': {'required': True},
'azure_file_url': {'required': True},
'credentials': {'required': True},
'relative_mount_path': {'required': True},
}
_attribute_map = {
'account_name': {'key': 'accountName', 'type': 'str'},
'azure_file_url': {'key': 'azureFileUrl', 'type': 'str'},
'credentials': {'key': 'credentials', 'type': 'AzureStorageCredentialsInfo'},
'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
'file_mode': {'key': 'fileMode', 'type': 'str'},
'directory_mode': {'key': 'directoryMode', 'type': 'str'},
}
def __init__(
self,
*,
account_name: str,
azure_file_url: str,
credentials: "AzureStorageCredentialsInfo",
relative_mount_path: str,
file_mode: Optional[str] = "0777",
directory_mode: Optional[str] = "0777",
**kwargs
):
super(AzureFileShareReference, self).__init__(**kwargs)
self.account_name = account_name
self.azure_file_url = azure_file_url
self.credentials = credentials
self.relative_mount_path = relative_mount_path
self.file_mode = file_mode
self.directory_mode = directory_mode
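

# Usage sketch (editor's illustration): mount an Azure File share, tightening
# the default 0777 modes to owner-only access. The URL and credentials are
# placeholders; `AzureStorageCredentialsInfo` is defined below in this module.
def _example_file_share_reference() -> "AzureFileShareReference":
    return AzureFileShareReference(
        account_name="<storage-account>",
        azure_file_url="https://<storage-account>.file.core.windows.net/<share>",
        credentials=AzureStorageCredentialsInfo(account_key="<account-key>"),
        relative_mount_path="afs",  # cluster level => $AZ_BATCHAI_MOUNT_ROOT/afs
        file_mode="0600",       # octal strings, matching the "0777" defaults
        directory_mode="0700",
    )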


class AzureStorageCredentialsInfo(msrest.serialization.Model):
"""Azure storage account credentials.
:param account_key: Storage account key. One of accountKey or accountKeySecretReference must be
specified.
:type account_key: str
:param account_key_secret_reference: Information about KeyVault secret storing the storage
account key. One of accountKey or accountKeySecretReference must be specified.
:type account_key_secret_reference: ~batch_ai.models.KeyVaultSecretReference
"""
_attribute_map = {
'account_key': {'key': 'accountKey', 'type': 'str'},
'account_key_secret_reference': {'key': 'accountKeySecretReference', 'type': 'KeyVaultSecretReference'},
}
def __init__(
self,
*,
account_key: Optional[str] = None,
account_key_secret_reference: Optional["KeyVaultSecretReference"] = None,
**kwargs
):
super(AzureStorageCredentialsInfo, self).__init__(**kwargs)
self.account_key = account_key
self.account_key_secret_reference = account_key_secret_reference
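

# Usage sketch (editor's illustration): the Key Vault flavor of storage
# credentials, which avoids placing the account key in the request itself.
# Assumes `KeyVaultSecretReference(source_vault=..., secret_url=...)` and
# `ResourceId(id=...)` as defined elsewhere in this models module.
def _example_credentials_from_key_vault() -> "AzureStorageCredentialsInfo":
    return AzureStorageCredentialsInfo(
        account_key_secret_reference=KeyVaultSecretReference(
            source_vault=ResourceId(
                id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                   "Microsoft.KeyVault/vaults/<vault>"),
            secret_url="https://<vault>.vault.azure.net/secrets/<key-secret>",
        ),
    )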


class BatchAIError(msrest.serialization.Model):
"""An error response from the Batch AI service.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: An identifier of the error. Codes are invariant and are intended to be consumed
programmatically.
:vartype code: str
:ivar message: A message describing the error, intended to be suitable for display in a user
interface.
:vartype message: str
:ivar details: A list of additional details about the error.
:vartype details: list[~batch_ai.models.NameValuePair]
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'details': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'details': {'key': 'details', 'type': '[NameValuePair]'},
}
def __init__(
self,
**kwargs
):
super(BatchAIError, self).__init__(**kwargs)
self.code = None
self.message = None
self.details = None


class Caffe2Settings(msrest.serialization.Model):
"""Caffe2 job settings.
All required parameters must be populated in order to send to Azure.
:param python_script_file_path: Required. The python script to execute.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the python script.
:type command_line_args: str
"""
_validation = {
'python_script_file_path': {'required': True},
}
_attribute_map = {
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
}
def __init__(
self,
*,
python_script_file_path: str,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
**kwargs
):
super(Caffe2Settings, self).__init__(**kwargs)
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args


class CaffeSettings(msrest.serialization.Model):
"""Caffe job settings.
:param config_file_path: Path of the config file for the job. This property cannot be specified
if pythonScriptFilePath is specified.
:type config_file_path: str
:param python_script_file_path: Python script to execute. This property cannot be specified if
configFilePath is specified.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter. The property can be
specified only if the pythonScriptFilePath is specified.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the Caffe job.
:type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to the nodeCount property.
:type process_count: int
"""
_attribute_map = {
'config_file_path': {'key': 'configFilePath', 'type': 'str'},
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
config_file_path: Optional[str] = None,
python_script_file_path: Optional[str] = None,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
process_count: Optional[int] = None,
**kwargs
):
super(CaffeSettings, self).__init__(**kwargs)
self.config_file_path = config_file_path
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args
self.process_count = process_count


class ChainerSettings(msrest.serialization.Model):
"""Chainer job settings.
All required parameters must be populated in order to send to Azure.
:param python_script_file_path: Required. The python script to execute.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the python script.
:type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to the nodeCount property.
:type process_count: int
"""
_validation = {
'python_script_file_path': {'required': True},
}
_attribute_map = {
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
python_script_file_path: str,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
process_count: Optional[int] = None,
**kwargs
):
super(ChainerSettings, self).__init__(**kwargs)
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args
self.process_count = process_count


class CloudErrorBody(msrest.serialization.Model):
"""An error response from the Batch AI service.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: An identifier for the error. Codes are invariant and are intended to be consumed
programmatically.
:vartype code: str
:ivar message: A message describing the error, intended to be suitable for display in a user
interface.
:vartype message: str
:ivar target: The target of the particular error. For example, the name of the property in
error.
:vartype target: str
:ivar details: A list of additional details about the error.
:vartype details: list[~batch_ai.models.CloudErrorBody]
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'target': {'readonly': True},
'details': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudErrorBody]'},
}
def __init__(
self,
**kwargs
):
super(CloudErrorBody, self).__init__(**kwargs)
self.code = None
self.message = None
self.target = None
self.details = None


class ProxyResource(msrest.serialization.Model):
"""A definition of an Azure proxy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ProxyResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None


class Cluster(ProxyResource):
"""Information about a Cluster.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param vm_size: The size of the virtual machines in the cluster. All nodes in a cluster have
the same VM size.
:type vm_size: str
:param vm_priority: VM priority of cluster nodes. Possible values include: "dedicated",
"lowpriority".
:type vm_priority: str or ~batch_ai.models.VmPriority
:param scale_settings: Scale settings of the cluster.
:type scale_settings: ~batch_ai.models.ScaleSettings
:param virtual_machine_configuration: Virtual machine configuration (OS image) of the compute
nodes. All nodes in a cluster have the same OS image configuration.
:type virtual_machine_configuration: ~batch_ai.models.VirtualMachineConfiguration
:param node_setup: Setup (mount file systems, performance counters settings and custom setup
task) to be performed on each compute node in the cluster.
:type node_setup: ~batch_ai.models.NodeSetup
:param user_account_settings: Administrator user account settings which can be used to SSH to
compute nodes.
:type user_account_settings: ~batch_ai.models.UserAccountSettings
:param subnet: Virtual network subnet resource ID the cluster nodes belong to.
:type subnet: ~batch_ai.models.ResourceId
:ivar creation_time: The time when the cluster was created.
:vartype creation_time: ~datetime.datetime
    :ivar provisioning_state: Provisioning state of the cluster. Possible values are: creating -
Specifies that the cluster is being created. succeeded - Specifies that the cluster has been
created successfully. failed - Specifies that the cluster creation has failed. deleting -
Specifies that the cluster is being deleted. Possible values include: "creating", "succeeded",
"failed", "deleting".
:vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
:ivar provisioning_state_transition_time: Time when the provisioning state was changed.
:vartype provisioning_state_transition_time: ~datetime.datetime
:ivar allocation_state: Allocation state of the cluster. Possible values are: steady -
Indicates that the cluster is not resizing. There are no changes to the number of compute nodes
in the cluster in progress. A cluster enters this state when it is created and when no
operations are being performed on the cluster to change the number of compute nodes. resizing -
Indicates that the cluster is resizing; that is, compute nodes are being added to or removed
from the cluster. Possible values include: "steady", "resizing".
:vartype allocation_state: str or ~batch_ai.models.AllocationState
:ivar allocation_state_transition_time: The time at which the cluster entered its current
allocation state.
:vartype allocation_state_transition_time: ~datetime.datetime
:ivar errors: Collection of errors encountered by various compute nodes during node setup.
:vartype errors: list[~batch_ai.models.BatchAIError]
:ivar current_node_count: The number of compute nodes currently assigned to the cluster.
:vartype current_node_count: int
:ivar node_state_counts: Counts of various node states on the cluster.
:vartype node_state_counts: ~batch_ai.models.NodeStateCounts
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'creation_time': {'readonly': True},
'provisioning_state': {'readonly': True},
'provisioning_state_transition_time': {'readonly': True},
'allocation_state': {'readonly': True},
'allocation_state_transition_time': {'readonly': True},
'errors': {'readonly': True},
'current_node_count': {'readonly': True},
'node_state_counts': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
'vm_priority': {'key': 'properties.vmPriority', 'type': 'str'},
'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
'virtual_machine_configuration': {'key': 'properties.virtualMachineConfiguration', 'type': 'VirtualMachineConfiguration'},
'node_setup': {'key': 'properties.nodeSetup', 'type': 'NodeSetup'},
'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'},
'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
'allocation_state': {'key': 'properties.allocationState', 'type': 'str'},
'allocation_state_transition_time': {'key': 'properties.allocationStateTransitionTime', 'type': 'iso-8601'},
'errors': {'key': 'properties.errors', 'type': '[BatchAIError]'},
'current_node_count': {'key': 'properties.currentNodeCount', 'type': 'int'},
'node_state_counts': {'key': 'properties.nodeStateCounts', 'type': 'NodeStateCounts'},
}
def __init__(
self,
*,
vm_size: Optional[str] = None,
vm_priority: Optional[Union[str, "VmPriority"]] = None,
scale_settings: Optional["ScaleSettings"] = None,
virtual_machine_configuration: Optional["VirtualMachineConfiguration"] = None,
node_setup: Optional["NodeSetup"] = None,
user_account_settings: Optional["UserAccountSettings"] = None,
subnet: Optional["ResourceId"] = None,
**kwargs
):
super(Cluster, self).__init__(**kwargs)
self.vm_size = vm_size
self.vm_priority = vm_priority
self.scale_settings = scale_settings
self.virtual_machine_configuration = virtual_machine_configuration
self.node_setup = node_setup
self.user_account_settings = user_account_settings
self.subnet = subnet
self.creation_time = None
self.provisioning_state = None
self.provisioning_state_transition_time = None
self.allocation_state = None
self.allocation_state_transition_time = None
self.errors = None
self.current_node_count = None
self.node_state_counts = None


class ClusterCreateParameters(msrest.serialization.Model):
"""Cluster creation operation.
:param vm_size: The size of the virtual machines in the cluster. All nodes in a cluster have
the same VM size. For information about available VM sizes for clusters using images from the
     Virtual Machines Marketplace, see Sizes for Virtual Machines (Linux). The Batch AI service supports
all Azure VM sizes except STANDARD_A0 and those with premium storage (STANDARD_GS, STANDARD_DS,
and STANDARD_DSV2 series).
:type vm_size: str
:param vm_priority: VM priority. Allowed values are: dedicated (default) and lowpriority.
Possible values include: "dedicated", "lowpriority".
:type vm_priority: str or ~batch_ai.models.VmPriority
:param scale_settings: Scale settings for the cluster. Batch AI service supports manual and
auto scale clusters.
:type scale_settings: ~batch_ai.models.ScaleSettings
:param virtual_machine_configuration: OS image configuration for cluster nodes. All nodes in a
cluster have the same OS image.
:type virtual_machine_configuration: ~batch_ai.models.VirtualMachineConfiguration
:param node_setup: Setup to be performed on each compute node in the cluster.
:type node_setup: ~batch_ai.models.NodeSetup
:param user_account_settings: Settings for an administrator user account that will be created
on each compute node in the cluster.
:type user_account_settings: ~batch_ai.models.UserAccountSettings
    :param subnet: Existing virtual network subnet to put the cluster nodes in. Note, if a File
     Server mount is configured in node setup, the File Server's subnet will be used automatically.
:type subnet: ~batch_ai.models.ResourceId
"""
_attribute_map = {
'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
'vm_priority': {'key': 'properties.vmPriority', 'type': 'str'},
'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
'virtual_machine_configuration': {'key': 'properties.virtualMachineConfiguration', 'type': 'VirtualMachineConfiguration'},
'node_setup': {'key': 'properties.nodeSetup', 'type': 'NodeSetup'},
'user_account_settings': {'key': 'properties.userAccountSettings', 'type': 'UserAccountSettings'},
'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
}
def __init__(
self,
*,
vm_size: Optional[str] = None,
vm_priority: Optional[Union[str, "VmPriority"]] = None,
scale_settings: Optional["ScaleSettings"] = None,
virtual_machine_configuration: Optional["VirtualMachineConfiguration"] = None,
node_setup: Optional["NodeSetup"] = None,
user_account_settings: Optional["UserAccountSettings"] = None,
subnet: Optional["ResourceId"] = None,
**kwargs
):
super(ClusterCreateParameters, self).__init__(**kwargs)
self.vm_size = vm_size
self.vm_priority = vm_priority
self.scale_settings = scale_settings
self.virtual_machine_configuration = virtual_machine_configuration
self.node_setup = node_setup
self.user_account_settings = user_account_settings
self.subnet = subnet
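

# Usage sketch (editor's illustration): minimal creation parameters for a
# manually scaled cluster. Assumes `ScaleSettings(manual=...)`,
# `ManualScaleSettings(target_node_count=...)` and
# `UserAccountSettings(admin_user_name=..., admin_user_password=...)` as
# defined elsewhere in this models module; credentials are placeholders.
def _example_cluster_create_parameters() -> "ClusterCreateParameters":
    return ClusterCreateParameters(
        vm_size="STANDARD_D2_V2",  # any supported size except STANDARD_A0 and premium-storage series
        vm_priority="dedicated",
        scale_settings=ScaleSettings(
            manual=ManualScaleSettings(target_node_count=2),
        ),
        user_account_settings=UserAccountSettings(
            admin_user_name="clusteradmin",
            admin_user_password="<password>",
        ),
    )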


class ClusterListResult(msrest.serialization.Model):
"""Values returned by the List Clusters operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The collection of returned Clusters.
:vartype value: list[~batch_ai.models.Cluster]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Cluster]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ClusterListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None


class ClustersListByWorkspaceOptions(msrest.serialization.Model):
"""Parameter group.
    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     clusters can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(ClustersListByWorkspaceOptions, self).__init__(**kwargs)
self.max_results = max_results


class ClusterUpdateParameters(msrest.serialization.Model):
"""Cluster update parameters.
:param scale_settings: Desired scale settings for the cluster. Batch AI service supports manual
and auto scale clusters.
:type scale_settings: ~batch_ai.models.ScaleSettings
"""
_attribute_map = {
'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
}
def __init__(
self,
*,
scale_settings: Optional["ScaleSettings"] = None,
**kwargs
):
super(ClusterUpdateParameters, self).__init__(**kwargs)
self.scale_settings = scale_settings
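

# Usage sketch (editor's illustration): scale settings are the only mutable
# property of an existing cluster, e.g. switching it to auto-scale. Assumes
# `ScaleSettings` accepts an `auto_scale` keyword, as defined elsewhere in
# this models module.
def _example_cluster_update_parameters() -> "ClusterUpdateParameters":
    return ClusterUpdateParameters(
        scale_settings=ScaleSettings(
            auto_scale=AutoScaleSettings(minimum_node_count=0,
                                         maximum_node_count=4),
        ),
    )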


class CNTKsettings(msrest.serialization.Model):
"""CNTK (aka Microsoft Cognitive Toolkit) job settings.
    :param language_type: The language to use for launching a CNTK (aka Microsoft Cognitive Toolkit)
job. Valid values are 'BrainScript' or 'Python'.
:type language_type: str
:param config_file_path: Specifies the path of the BrainScript config file. This property can
be specified only if the languageType is 'BrainScript'.
:type config_file_path: str
:param python_script_file_path: Python script to execute. This property can be specified only
if the languageType is 'Python'.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter. This property can be
specified only if the languageType is 'Python'.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the python script or
     CNTK executable.
:type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to the nodeCount property.
:type process_count: int
"""
_attribute_map = {
'language_type': {'key': 'languageType', 'type': 'str'},
'config_file_path': {'key': 'configFilePath', 'type': 'str'},
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
language_type: Optional[str] = None,
config_file_path: Optional[str] = None,
python_script_file_path: Optional[str] = None,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
process_count: Optional[int] = None,
**kwargs
):
super(CNTKsettings, self).__init__(**kwargs)
self.language_type = language_type
self.config_file_path = config_file_path
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args
self.process_count = process_count
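

# Usage sketch (editor's illustration): CNTK settings are mode-dependent, as
# the docstring above notes - BrainScript jobs take configFilePath, Python
# jobs take pythonScriptFilePath, never both. Paths are placeholders.
def _example_cntk_settings_python() -> "CNTKsettings":
    return CNTKsettings(
        language_type="Python",
        python_script_file_path="$AZ_BATCHAI_MOUNT_ROOT/scripts/train.py",
        command_line_args="--epochs 10",
        process_count=2,  # defaults to the job's nodeCount when omitted
    )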


class ContainerSettings(msrest.serialization.Model):
"""Docker container settings.
All required parameters must be populated in order to send to Azure.
    :param image_source_registry: Required. Information about the docker image and docker registry
     to download the container from.
:type image_source_registry: ~batch_ai.models.ImageSourceRegistry
    :param shm_size: Size of /dev/shm. Please refer to the docker documentation for supported
     argument formats.
:type shm_size: str
"""
_validation = {
'image_source_registry': {'required': True},
}
_attribute_map = {
'image_source_registry': {'key': 'imageSourceRegistry', 'type': 'ImageSourceRegistry'},
'shm_size': {'key': 'shmSize', 'type': 'str'},
}
def __init__(
self,
*,
image_source_registry: "ImageSourceRegistry",
shm_size: Optional[str] = None,
**kwargs
):
super(ContainerSettings, self).__init__(**kwargs)
self.image_source_registry = image_source_registry
self.shm_size = shm_size


class CustomMpiSettings(msrest.serialization.Model):
"""Custom MPI job settings.
All required parameters must be populated in order to send to Azure.
    :param command_line: Required. The command line to be executed by the MPI runtime on each compute
node.
:type command_line: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to the nodeCount property.
:type process_count: int
"""
_validation = {
'command_line': {'required': True},
}
_attribute_map = {
'command_line': {'key': 'commandLine', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
command_line: str,
process_count: Optional[int] = None,
**kwargs
):
super(CustomMpiSettings, self).__init__(**kwargs)
self.command_line = command_line
self.process_count = process_count
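

# Usage sketch (editor's illustration): a custom MPI job needs only the
# command line executed by the MPI runtime on each node; process_count
# defaults to the job's nodeCount when omitted. The path is a placeholder.
def _example_custom_mpi_settings() -> "CustomMpiSettings":
    return CustomMpiSettings(
        command_line="python $AZ_BATCHAI_MOUNT_ROOT/scripts/train.py",
        process_count=4,
    )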


class DataDisks(msrest.serialization.Model):
"""Data disks settings.
All required parameters must be populated in order to send to Azure.
:param disk_size_in_gb: Required. Disk size in GB for the blank data disks.
:type disk_size_in_gb: int
:param caching_type: Caching type for the disks. Available values are none (default), readonly,
readwrite. Caching type can be set only for VM sizes supporting premium storage. Possible
values include: "none", "readonly", "readwrite". Default value: "none".
:type caching_type: str or ~batch_ai.models.CachingType
    :param disk_count: Required. Number of data disks attached to the File Server. If multiple
     disks are attached, they will be configured in RAID level 0.
:type disk_count: int
:param storage_account_type: Required. Type of storage account to be used on the disk. Possible
values are: Standard_LRS or Premium_LRS. Premium storage account type can only be used with VM
sizes supporting premium storage. Possible values include: "Standard_LRS", "Premium_LRS".
:type storage_account_type: str or ~batch_ai.models.StorageAccountType
"""
_validation = {
'disk_size_in_gb': {'required': True},
'disk_count': {'required': True},
'storage_account_type': {'required': True},
}
_attribute_map = {
'disk_size_in_gb': {'key': 'diskSizeInGB', 'type': 'int'},
'caching_type': {'key': 'cachingType', 'type': 'str'},
'disk_count': {'key': 'diskCount', 'type': 'int'},
'storage_account_type': {'key': 'storageAccountType', 'type': 'str'},
}
def __init__(
self,
*,
disk_size_in_gb: int,
disk_count: int,
storage_account_type: Union[str, "StorageAccountType"],
caching_type: Optional[Union[str, "CachingType"]] = "none",
**kwargs
):
super(DataDisks, self).__init__(**kwargs)
self.disk_size_in_gb = disk_size_in_gb
self.caching_type = caching_type
self.disk_count = disk_count
self.storage_account_type = storage_account_type
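

# Usage sketch (editor's illustration): two 1 TB premium disks striped as
# RAID level 0 into a single volume; caching other than "none" requires a
# VM size that supports premium storage.
def _example_data_disks() -> "DataDisks":
    return DataDisks(
        disk_size_in_gb=1024,
        disk_count=2,  # more than one disk => RAID level 0
        storage_account_type="Premium_LRS",
        caching_type="readonly",
    )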


class EnvironmentVariable(msrest.serialization.Model):
"""An environment variable definition.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the environment variable.
:type name: str
:param value: Required. The value of the environment variable.
:type value: str
"""
_validation = {
'name': {'required': True},
'value': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: str,
value: str,
**kwargs
):
super(EnvironmentVariable, self).__init__(**kwargs)
self.name = name
self.value = value


class EnvironmentVariableWithSecretValue(msrest.serialization.Model):
"""An environment variable with secret value definition.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the environment variable to store the secret value.
:type name: str
:param value: The value of the environment variable. This value will never be reported back by
Batch AI.
:type value: str
:param value_secret_reference: KeyVault store and secret which contains the value for the
environment variable. One of value or valueSecretReference must be provided.
:type value_secret_reference: ~batch_ai.models.KeyVaultSecretReference
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'value_secret_reference': {'key': 'valueSecretReference', 'type': 'KeyVaultSecretReference'},
}
def __init__(
self,
*,
name: str,
value: Optional[str] = None,
value_secret_reference: Optional["KeyVaultSecretReference"] = None,
**kwargs
):
super(EnvironmentVariableWithSecretValue, self).__init__(**kwargs)
self.name = name
self.value = value
self.value_secret_reference = value_secret_reference
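

# Usage sketch (editor's illustration): a secret environment variable; the
# value is passed inline here (and never reported back by Batch AI), but a
# `value_secret_reference` pointing at Key Vault may be used instead. The
# variable name is hypothetical.
def _example_secret_environment_variable() -> "EnvironmentVariableWithSecretValue":
    return EnvironmentVariableWithSecretValue(
        name="MY_API_KEY",
        value="<secret-value>",  # or: value_secret_reference=KeyVaultSecretReference(...)
    )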


class Experiment(ProxyResource):
"""Experiment information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar creation_time: Time when the Experiment was created.
:vartype creation_time: ~datetime.datetime
:ivar provisioning_state: The provisioned state of the experiment. Possible values include:
"creating", "succeeded", "failed", "deleting".
:vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
:ivar provisioning_state_transition_time: The time at which the experiment entered its current
provisioning state.
:vartype provisioning_state_transition_time: ~datetime.datetime
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'creation_time': {'readonly': True},
'provisioning_state': {'readonly': True},
'provisioning_state_transition_time': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(Experiment, self).__init__(**kwargs)
self.creation_time = None
self.provisioning_state = None
self.provisioning_state_transition_time = None


class ExperimentListResult(msrest.serialization.Model):
"""Values returned by the List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The collection of experiments.
:vartype value: list[~batch_ai.models.Experiment]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Experiment]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExperimentListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None


class ExperimentsListByWorkspaceOptions(msrest.serialization.Model):
"""Parameter group.
    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     experiments can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(ExperimentsListByWorkspaceOptions, self).__init__(**kwargs)
self.max_results = max_results


class File(msrest.serialization.Model):
"""Properties of the file or directory.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of the file.
:vartype name: str
:ivar file_type: Type of the file. Possible values are file and directory. Possible values
include: "file", "directory".
:vartype file_type: str or ~batch_ai.models.FileType
:ivar download_url: URL to download the corresponding file. The downloadUrl is not returned for
directories.
:vartype download_url: str
:ivar last_modified: The time at which the file was last modified.
:vartype last_modified: ~datetime.datetime
    :ivar content_length: The size of the file.
:vartype content_length: long
"""
_validation = {
'name': {'readonly': True},
'file_type': {'readonly': True},
'download_url': {'readonly': True},
'last_modified': {'readonly': True},
'content_length': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'file_type': {'key': 'fileType', 'type': 'str'},
'download_url': {'key': 'downloadUrl', 'type': 'str'},
'last_modified': {'key': 'properties.lastModified', 'type': 'iso-8601'},
'content_length': {'key': 'properties.contentLength', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(File, self).__init__(**kwargs)
self.name = None
self.file_type = None
self.download_url = None
self.last_modified = None
self.content_length = None


class FileListResult(msrest.serialization.Model):
"""Values returned by the List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The collection of returned job directories and files.
:vartype value: list[~batch_ai.models.File]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[File]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FileListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None


class FileServer(ProxyResource):
"""File Server information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param vm_size: VM size of the File Server.
:type vm_size: str
:param ssh_configuration: SSH configuration for accessing the File Server node.
:type ssh_configuration: ~batch_ai.models.SshConfiguration
    :param data_disks: Information about disks attached to the File Server VM.
:type data_disks: ~batch_ai.models.DataDisks
:param subnet: File Server virtual network subnet resource ID.
:type subnet: ~batch_ai.models.ResourceId
:ivar mount_settings: File Server mount settings.
:vartype mount_settings: ~batch_ai.models.MountSettings
:ivar provisioning_state_transition_time: Time when the provisioning state was changed.
:vartype provisioning_state_transition_time: ~datetime.datetime
:ivar creation_time: Time when the FileServer was created.
:vartype creation_time: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the File Server. Possible values: creating -
The File Server is getting created; updating - The File Server creation has been accepted and
it is getting updated; deleting - The user has requested that the File Server be deleted, and
it is in the process of being deleted; failed - The File Server creation has failed with the
specified error code. Details about the error code are specified in the message field;
succeeded - The File Server creation has succeeded. Possible values include: "creating",
"updating", "deleting", "succeeded", "failed".
:vartype provisioning_state: str or ~batch_ai.models.FileServerProvisioningState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'mount_settings': {'readonly': True},
'provisioning_state_transition_time': {'readonly': True},
'creation_time': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
'ssh_configuration': {'key': 'properties.sshConfiguration', 'type': 'SshConfiguration'},
'data_disks': {'key': 'properties.dataDisks', 'type': 'DataDisks'},
'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
'mount_settings': {'key': 'properties.mountSettings', 'type': 'MountSettings'},
'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
vm_size: Optional[str] = None,
ssh_configuration: Optional["SshConfiguration"] = None,
data_disks: Optional["DataDisks"] = None,
subnet: Optional["ResourceId"] = None,
**kwargs
):
super(FileServer, self).__init__(**kwargs)
self.vm_size = vm_size
self.ssh_configuration = ssh_configuration
self.data_disks = data_disks
self.subnet = subnet
self.mount_settings = None
self.provisioning_state_transition_time = None
self.creation_time = None
self.provisioning_state = None


class FileServerCreateParameters(msrest.serialization.Model):
"""File Server creation parameters.
:param vm_size: The size of the virtual machine for the File Server. For information about
available VM sizes from the Virtual Machines Marketplace, see Sizes for Virtual Machines
(Linux).
:type vm_size: str
:param ssh_configuration: SSH configuration for the File Server node.
:type ssh_configuration: ~batch_ai.models.SshConfiguration
:param data_disks: Settings for the data disks which will be created for the File Server.
:type data_disks: ~batch_ai.models.DataDisks
:param subnet: Identifier of an existing virtual network subnet to put the File Server in. If
not provided, a new virtual network and subnet will be created.
:type subnet: ~batch_ai.models.ResourceId
"""
_attribute_map = {
'vm_size': {'key': 'properties.vmSize', 'type': 'str'},
'ssh_configuration': {'key': 'properties.sshConfiguration', 'type': 'SshConfiguration'},
'data_disks': {'key': 'properties.dataDisks', 'type': 'DataDisks'},
'subnet': {'key': 'properties.subnet', 'type': 'ResourceId'},
}
def __init__(
self,
*,
vm_size: Optional[str] = None,
ssh_configuration: Optional["SshConfiguration"] = None,
data_disks: Optional["DataDisks"] = None,
subnet: Optional["ResourceId"] = None,
**kwargs
):
super(FileServerCreateParameters, self).__init__(**kwargs)
self.vm_size = vm_size
self.ssh_configuration = ssh_configuration
self.data_disks = data_disks
self.subnet = subnet


class FileServerListResult(msrest.serialization.Model):
"""Values returned by the File Server List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The collection of File Servers.
:type value: list[~batch_ai.models.FileServer]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FileServer]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["FileServer"]] = None,
**kwargs
):
super(FileServerListResult, self).__init__(**kwargs)
self.value = value
self.next_link = None


class FileServerReference(msrest.serialization.Model):
"""File Server mounting configuration.
All required parameters must be populated in order to send to Azure.
:param file_server: Required. Resource ID of the existing File Server to be mounted.
:type file_server: ~batch_ai.models.ResourceId
:param source_directory: File Server directory that needs to be mounted. If this property is
not specified, the entire File Server will be mounted.
:type source_directory: str
:param relative_mount_path: Required. The relative path on the compute node where the File
Server will be mounted. Note that all cluster level file servers will be mounted under
$AZ_BATCHAI_MOUNT_ROOT location and all job level file servers will be mounted under
$AZ_BATCHAI_JOB_MOUNT_ROOT.
:type relative_mount_path: str
    :param mount_options: Mount options to be passed to the mount command.
:type mount_options: str
"""
_validation = {
'file_server': {'required': True},
'relative_mount_path': {'required': True},
}
_attribute_map = {
'file_server': {'key': 'fileServer', 'type': 'ResourceId'},
'source_directory': {'key': 'sourceDirectory', 'type': 'str'},
'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
'mount_options': {'key': 'mountOptions', 'type': 'str'},
}
def __init__(
self,
*,
file_server: "ResourceId",
relative_mount_path: str,
source_directory: Optional[str] = None,
mount_options: Optional[str] = None,
**kwargs
):
super(FileServerReference, self).__init__(**kwargs)
self.file_server = file_server
self.source_directory = source_directory
self.relative_mount_path = relative_mount_path
self.mount_options = mount_options
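

# Usage sketch (editor's illustration): mount the /data directory of an
# existing File Server on every node; the resource ID is a placeholder and
# `ResourceId(id=...)` is assumed from this models module.
def _example_file_server_reference() -> "FileServerReference":
    return FileServerReference(
        file_server=ResourceId(
            id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
               "Microsoft.BatchAI/workspaces/<ws>/fileServers/<fs>"),
        source_directory="/data",   # omit to mount the entire File Server
        relative_mount_path="nfs",  # cluster level => $AZ_BATCHAI_MOUNT_ROOT/nfs
    )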


class FileServersListByWorkspaceOptions(msrest.serialization.Model):
"""Parameter group.
    :param max_results: The maximum number of items to return in the response. A maximum of 1000
     File Servers can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(FileServersListByWorkspaceOptions, self).__init__(**kwargs)
self.max_results = max_results


class HorovodSettings(msrest.serialization.Model):
"""Specifies the settings for Horovod job.
All required parameters must be populated in order to send to Azure.
:param python_script_file_path: Required. The python script to execute.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the python script.
:type command_line_args: str
    :param process_count: Number of processes to launch for the job execution. The default value
     for this property is equal to the nodeCount property.
:type process_count: int
"""
_validation = {
'python_script_file_path': {'required': True},
}
_attribute_map = {
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
}
def __init__(
self,
*,
python_script_file_path: str,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
process_count: Optional[int] = None,
**kwargs
):
super(HorovodSettings, self).__init__(**kwargs)
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args
self.process_count = process_count


class ImageReference(msrest.serialization.Model):
"""The OS image reference.
All required parameters must be populated in order to send to Azure.
:param publisher: Required. Publisher of the image.
:type publisher: str
:param offer: Required. Offer of the image.
:type offer: str
:param sku: Required. SKU of the image.
:type sku: str
:param version: Version of the image.
:type version: str
:param virtual_machine_image_id: The ARM resource identifier of the virtual machine image for
the compute nodes. This is of the form
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/images/{imageName}.
The virtual machine image must be in the same region and subscription as the cluster. For
information about the firewall settings for the Batch node agent to communicate with the Batch
service see
https://docs.microsoft.com/en-us/azure/batch/batch-api-basics#virtual-network-vnet-and-firewall-configuration.
     Note, you need to provide the publisher, offer and sku of the base OS image from which the
     custom image has been derived.
:type virtual_machine_image_id: str
"""
_validation = {
'publisher': {'required': True},
'offer': {'required': True},
'sku': {'required': True},
}
_attribute_map = {
'publisher': {'key': 'publisher', 'type': 'str'},
'offer': {'key': 'offer', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'virtual_machine_image_id': {'key': 'virtualMachineImageId', 'type': 'str'},
}
def __init__(
self,
*,
publisher: str,
offer: str,
sku: str,
version: Optional[str] = None,
virtual_machine_image_id: Optional[str] = None,
**kwargs
):
super(ImageReference, self).__init__(**kwargs)
self.publisher = publisher
self.offer = offer
self.sku = sku
self.version = version
self.virtual_machine_image_id = virtual_machine_image_id
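

# Usage sketch (editor's illustration): a marketplace image reference. For a
# custom image, additionally set `virtual_machine_image_id` and keep the
# publisher/offer/sku of the base image it was derived from, per the
# docstring above.
def _example_image_reference() -> "ImageReference":
    return ImageReference(
        publisher="Canonical",
        offer="UbuntuServer",
        sku="16.04-LTS",  # `version` is optional and omitted here
    )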


class ImageSourceRegistry(msrest.serialization.Model):
"""Information about docker image for the job.
All required parameters must be populated in order to send to Azure.
    :param server_url: URL for the image repository.
:type server_url: str
:param image: Required. The name of the image in the image repository.
:type image: str
:param credentials: Credentials to access the private docker repository.
:type credentials: ~batch_ai.models.PrivateRegistryCredentials
"""
_validation = {
'image': {'required': True},
}
_attribute_map = {
'server_url': {'key': 'serverUrl', 'type': 'str'},
'image': {'key': 'image', 'type': 'str'},
'credentials': {'key': 'credentials', 'type': 'PrivateRegistryCredentials'},
}
def __init__(
self,
*,
image: str,
server_url: Optional[str] = None,
credentials: Optional["PrivateRegistryCredentials"] = None,
**kwargs
):
super(ImageSourceRegistry, self).__init__(**kwargs)
self.server_url = server_url
self.image = image
self.credentials = credentials


class Job(ProxyResource):
"""Information about a Job.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param scheduling_priority: Scheduling priority associated with the job. Possible values
include: "low", "normal", "high".
:type scheduling_priority: str or ~batch_ai.models.JobPriority
:param cluster: Resource ID of the cluster associated with the job.
:type cluster: ~batch_ai.models.ResourceId
    :param mount_volumes: Collection of mount volumes available to the job during execution. These
     volumes are mounted before the job execution and unmounted after the job completion. The
     volumes are mounted at the location specified by the $AZ_BATCHAI_JOB_MOUNT_ROOT environment
     variable.
:type mount_volumes: ~batch_ai.models.MountVolumes
    :param node_count: Number of compute nodes to run the job on. The job will be gang scheduled
     on that many compute nodes.
:type node_count: int
    :param container_settings: Docker container settings for the job. If the container was
     downloaded as part of cluster setup, the same container image will be used. If not provided,
     the job will run directly on the VM.
:type container_settings: ~batch_ai.models.ContainerSettings
    :param tool_type: The toolkit type of the job. Possible values are: cntk, tensorflow, caffe,
     caffe2, chainer, pytorch, custom, custommpi, horovod. Possible values include: "cntk",
     "tensorflow", "caffe", "caffe2", "chainer", "horovod", "custommpi", "custom".
:type tool_type: str or ~batch_ai.models.ToolType
:param cntk_settings: CNTK (aka Microsoft Cognitive Toolkit) job settings.
:type cntk_settings: ~batch_ai.models.CNTKsettings
    :param py_torch_settings: PyTorch job settings.
:type py_torch_settings: ~batch_ai.models.PyTorchSettings
:param tensor_flow_settings: TensorFlow job settings.
:type tensor_flow_settings: ~batch_ai.models.TensorFlowSettings
:param caffe_settings: Caffe job settings.
:type caffe_settings: ~batch_ai.models.CaffeSettings
:param caffe2_settings: Caffe2 job settings.
:type caffe2_settings: ~batch_ai.models.Caffe2Settings
:param chainer_settings: Chainer job settings.
:type chainer_settings: ~batch_ai.models.ChainerSettings
:param custom_toolkit_settings: Custom tool kit job settings.
:type custom_toolkit_settings: ~batch_ai.models.CustomToolkitSettings
:param custom_mpi_settings: Custom MPI job settings.
:type custom_mpi_settings: ~batch_ai.models.CustomMpiSettings
:param horovod_settings: Specifies the settings for Horovod job.
:type horovod_settings: ~batch_ai.models.HorovodSettings
:param job_preparation: The specified actions will run on all the nodes that are part of the
job.
:type job_preparation: ~batch_ai.models.JobPreparation
    :ivar job_output_directory_path_segment: A segment of the job's output directories path created
     by Batch AI. Batch AI creates the job's output directories under a unique path to avoid
     conflicts between jobs. This value contains a path segment generated by Batch AI to make the
     path unique and can be used to find the output directory on the node or mounted filesystem.
:vartype job_output_directory_path_segment: str
    :param std_out_err_path_prefix: The path where the Batch AI service stores stdout, stderr, and
     the execution log of the job.
:type std_out_err_path_prefix: str
:param input_directories: A list of input directories for the job.
:type input_directories: list[~batch_ai.models.InputDirectory]
:param output_directories: A list of output directories for the job.
:type output_directories: list[~batch_ai.models.OutputDirectory]
    :param environment_variables: A collection of user defined environment variables to be set up
     for the job.
:type environment_variables: list[~batch_ai.models.EnvironmentVariable]
    :param secrets: A collection of user defined environment variables with secret values to be
     set up for the job. The server will never report values of these variables back.
:type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
:param constraints: Constraints associated with the Job.
:type constraints: ~batch_ai.models.JobPropertiesConstraints
:ivar creation_time: The creation time of the job.
:vartype creation_time: ~datetime.datetime
:ivar provisioning_state: The provisioned state of the Batch AI job. Possible values include:
"creating", "succeeded", "failed", "deleting".
:vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
:ivar provisioning_state_transition_time: The time at which the job entered its current
provisioning state.
:vartype provisioning_state_transition_time: ~datetime.datetime
    :ivar execution_state: The current state of the job. Possible values are: queued - The job is
     queued and able to run. A job enters this state when it is created, or when it is awaiting a
     retry after a failed run. running - The job is running on a compute cluster. This includes
     job-level preparation such as downloading resource files or setting up the container specified
     on the job - it does not necessarily mean that the job command line has started executing.
     terminating - The job is being terminated by the user; the terminate operation is in progress.
     succeeded - The job has completed running successfully and exited with exit code 0. failed -
     The job has finished unsuccessfully (failed with a non-zero exit code) and has exhausted its
     retry limit. A job is also marked as failed if an error occurred launching the job. Possible
     values include: "queued", "running", "terminating", "succeeded", "failed".
:vartype execution_state: str or ~batch_ai.models.ExecutionState
:ivar execution_state_transition_time: The time at which the job entered its current execution
state.
:vartype execution_state_transition_time: ~datetime.datetime
:param execution_info: Information about the execution of a job.
:type execution_info: ~batch_ai.models.JobPropertiesExecutionInfo
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'job_output_directory_path_segment': {'readonly': True},
'creation_time': {'readonly': True},
'provisioning_state': {'readonly': True},
'provisioning_state_transition_time': {'readonly': True},
'execution_state': {'readonly': True},
'execution_state_transition_time': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'},
'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'},
'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'},
'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
'container_settings': {'key': 'properties.containerSettings', 'type': 'ContainerSettings'},
'tool_type': {'key': 'properties.toolType', 'type': 'str'},
'cntk_settings': {'key': 'properties.cntkSettings', 'type': 'CNTKsettings'},
'py_torch_settings': {'key': 'properties.pyTorchSettings', 'type': 'PyTorchSettings'},
'tensor_flow_settings': {'key': 'properties.tensorFlowSettings', 'type': 'TensorFlowSettings'},
'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'},
'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'},
'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'},
'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'},
'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'},
'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'},
'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'},
'job_output_directory_path_segment': {'key': 'properties.jobOutputDirectoryPathSegment', 'type': 'str'},
'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'},
'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'},
'output_directories': {'key': 'properties.outputDirectories', 'type': '[OutputDirectory]'},
'environment_variables': {'key': 'properties.environmentVariables', 'type': '[EnvironmentVariable]'},
'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
'constraints': {'key': 'properties.constraints', 'type': 'JobPropertiesConstraints'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
'execution_state': {'key': 'properties.executionState', 'type': 'str'},
'execution_state_transition_time': {'key': 'properties.executionStateTransitionTime', 'type': 'iso-8601'},
'execution_info': {'key': 'properties.executionInfo', 'type': 'JobPropertiesExecutionInfo'},
}
def __init__(
self,
*,
scheduling_priority: Optional[Union[str, "JobPriority"]] = None,
cluster: Optional["ResourceId"] = None,
mount_volumes: Optional["MountVolumes"] = None,
node_count: Optional[int] = None,
container_settings: Optional["ContainerSettings"] = None,
tool_type: Optional[Union[str, "ToolType"]] = None,
cntk_settings: Optional["CNTKsettings"] = None,
py_torch_settings: Optional["PyTorchSettings"] = None,
tensor_flow_settings: Optional["TensorFlowSettings"] = None,
caffe_settings: Optional["CaffeSettings"] = None,
caffe2_settings: Optional["Caffe2Settings"] = None,
chainer_settings: Optional["ChainerSettings"] = None,
custom_toolkit_settings: Optional["CustomToolkitSettings"] = None,
custom_mpi_settings: Optional["CustomMpiSettings"] = None,
horovod_settings: Optional["HorovodSettings"] = None,
job_preparation: Optional["JobPreparation"] = None,
std_out_err_path_prefix: Optional[str] = None,
input_directories: Optional[List["InputDirectory"]] = None,
output_directories: Optional[List["OutputDirectory"]] = None,
environment_variables: Optional[List["EnvironmentVariable"]] = None,
secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
constraints: Optional["JobPropertiesConstraints"] = None,
execution_info: Optional["JobPropertiesExecutionInfo"] = None,
**kwargs
):
super(Job, self).__init__(**kwargs)
self.scheduling_priority = scheduling_priority
self.cluster = cluster
self.mount_volumes = mount_volumes
self.node_count = node_count
self.container_settings = container_settings
self.tool_type = tool_type
self.cntk_settings = cntk_settings
self.py_torch_settings = py_torch_settings
self.tensor_flow_settings = tensor_flow_settings
self.caffe_settings = caffe_settings
self.caffe2_settings = caffe2_settings
self.chainer_settings = chainer_settings
self.custom_toolkit_settings = custom_toolkit_settings
self.custom_mpi_settings = custom_mpi_settings
self.horovod_settings = horovod_settings
self.job_preparation = job_preparation
self.job_output_directory_path_segment = None
self.std_out_err_path_prefix = std_out_err_path_prefix
self.input_directories = input_directories
self.output_directories = output_directories
self.environment_variables = environment_variables
self.secrets = secrets
self.constraints = constraints
self.creation_time = None
self.provisioning_state = None
self.provisioning_state_transition_time = None
self.execution_state = None
self.execution_state_transition_time = None
self.execution_info = execution_info
[docs]class JobBasePropertiesConstraints(msrest.serialization.Model):
"""Constraints associated with the Job.
:param max_wall_clock_time: Max time the job can run. Default value: 1 week.
:type max_wall_clock_time: ~datetime.timedelta
"""
_attribute_map = {
'max_wall_clock_time': {'key': 'maxWallClockTime', 'type': 'duration'},
}
def __init__(
self,
*,
max_wall_clock_time: Optional[datetime.timedelta] = datetime.timedelta(days=7),
**kwargs
):
super(JobBasePropertiesConstraints, self).__init__(**kwargs)
self.max_wall_clock_time = max_wall_clock_time
[docs]class JobCreateParameters(msrest.serialization.Model):
"""Job creation parameters.
:param scheduling_priority: Scheduling priority associated with the job. Possible values
 include: "low", "normal", "high".
:type scheduling_priority: str or ~batch_ai.models.JobPriority
:param cluster: Resource ID of the cluster on which this job will run.
:type cluster: ~batch_ai.models.ResourceId
:param mount_volumes: Information on mount volumes to be used by the job. These volumes will be
 mounted before the job execution and will be unmounted after the job completion. The volumes
 will be mounted at the location specified by the $AZ_BATCHAI_JOB_MOUNT_ROOT environment
 variable.
:type mount_volumes: ~batch_ai.models.MountVolumes
:param node_count: Number of compute nodes to run the job on. The job will be gang scheduled on
that many compute nodes.
:type node_count: int
:param container_settings: Docker container settings for the job. If not provided, the job will
run directly on the node.
:type container_settings: ~batch_ai.models.ContainerSettings
:param cntk_settings: Settings for CNTK (aka Microsoft Cognitive Toolkit) job.
:type cntk_settings: ~batch_ai.models.CNTKsettings
:param py_torch_settings: Settings for pyTorch job.
:type py_torch_settings: ~batch_ai.models.PyTorchSettings
:param tensor_flow_settings: Settings for TensorFlow job.
:type tensor_flow_settings: ~batch_ai.models.TensorFlowSettings
:param caffe_settings: Settings for Caffe job.
:type caffe_settings: ~batch_ai.models.CaffeSettings
:param caffe2_settings: Settings for Caffe2 job.
:type caffe2_settings: ~batch_ai.models.Caffe2Settings
:param chainer_settings: Settings for Chainer job.
:type chainer_settings: ~batch_ai.models.ChainerSettings
:param custom_toolkit_settings: Settings for custom toolkit job.
:type custom_toolkit_settings: ~batch_ai.models.CustomToolkitSettings
:param custom_mpi_settings: Settings for custom MPI job.
:type custom_mpi_settings: ~batch_ai.models.CustomMpiSettings
:param horovod_settings: Settings for Horovod job.
:type horovod_settings: ~batch_ai.models.HorovodSettings
:param job_preparation: A command line to be executed on each node allocated for the job before
 the toolkit is launched.
:type job_preparation: ~batch_ai.models.JobPreparation
:param std_out_err_path_prefix: The path where the Batch AI service will store the stdout,
 stderr, and execution log of the job.
:type std_out_err_path_prefix: str
:param input_directories: A list of input directories for the job.
:type input_directories: list[~batch_ai.models.InputDirectory]
:param output_directories: A list of output directories for the job.
:type output_directories: list[~batch_ai.models.OutputDirectory]
:param environment_variables: A list of user defined environment variables which will be set up
 for the job.
:type environment_variables: list[~batch_ai.models.EnvironmentVariable]
:param secrets: A list of user defined environment variables with secret values which will be
 set up for the job. The server will never report the values of these variables back.
:type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
:param constraints: Constraints associated with the Job.
:type constraints: ~batch_ai.models.JobBasePropertiesConstraints
"""
_attribute_map = {
'scheduling_priority': {'key': 'properties.schedulingPriority', 'type': 'str'},
'cluster': {'key': 'properties.cluster', 'type': 'ResourceId'},
'mount_volumes': {'key': 'properties.mountVolumes', 'type': 'MountVolumes'},
'node_count': {'key': 'properties.nodeCount', 'type': 'int'},
'container_settings': {'key': 'properties.containerSettings', 'type': 'ContainerSettings'},
'cntk_settings': {'key': 'properties.cntkSettings', 'type': 'CNTKsettings'},
'py_torch_settings': {'key': 'properties.pyTorchSettings', 'type': 'PyTorchSettings'},
'tensor_flow_settings': {'key': 'properties.tensorFlowSettings', 'type': 'TensorFlowSettings'},
'caffe_settings': {'key': 'properties.caffeSettings', 'type': 'CaffeSettings'},
'caffe2_settings': {'key': 'properties.caffe2Settings', 'type': 'Caffe2Settings'},
'chainer_settings': {'key': 'properties.chainerSettings', 'type': 'ChainerSettings'},
'custom_toolkit_settings': {'key': 'properties.customToolkitSettings', 'type': 'CustomToolkitSettings'},
'custom_mpi_settings': {'key': 'properties.customMpiSettings', 'type': 'CustomMpiSettings'},
'horovod_settings': {'key': 'properties.horovodSettings', 'type': 'HorovodSettings'},
'job_preparation': {'key': 'properties.jobPreparation', 'type': 'JobPreparation'},
'std_out_err_path_prefix': {'key': 'properties.stdOutErrPathPrefix', 'type': 'str'},
'input_directories': {'key': 'properties.inputDirectories', 'type': '[InputDirectory]'},
'output_directories': {'key': 'properties.outputDirectories', 'type': '[OutputDirectory]'},
'environment_variables': {'key': 'properties.environmentVariables', 'type': '[EnvironmentVariable]'},
'secrets': {'key': 'properties.secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
'constraints': {'key': 'properties.constraints', 'type': 'JobBasePropertiesConstraints'},
}
def __init__(
self,
*,
scheduling_priority: Optional[Union[str, "JobPriority"]] = None,
cluster: Optional["ResourceId"] = None,
mount_volumes: Optional["MountVolumes"] = None,
node_count: Optional[int] = None,
container_settings: Optional["ContainerSettings"] = None,
cntk_settings: Optional["CNTKsettings"] = None,
py_torch_settings: Optional["PyTorchSettings"] = None,
tensor_flow_settings: Optional["TensorFlowSettings"] = None,
caffe_settings: Optional["CaffeSettings"] = None,
caffe2_settings: Optional["Caffe2Settings"] = None,
chainer_settings: Optional["ChainerSettings"] = None,
custom_toolkit_settings: Optional["CustomToolkitSettings"] = None,
custom_mpi_settings: Optional["CustomMpiSettings"] = None,
horovod_settings: Optional["HorovodSettings"] = None,
job_preparation: Optional["JobPreparation"] = None,
std_out_err_path_prefix: Optional[str] = None,
input_directories: Optional[List["InputDirectory"]] = None,
output_directories: Optional[List["OutputDirectory"]] = None,
environment_variables: Optional[List["EnvironmentVariable"]] = None,
secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
constraints: Optional["JobBasePropertiesConstraints"] = None,
**kwargs
):
super(JobCreateParameters, self).__init__(**kwargs)
self.scheduling_priority = scheduling_priority
self.cluster = cluster
self.mount_volumes = mount_volumes
self.node_count = node_count
self.container_settings = container_settings
self.cntk_settings = cntk_settings
self.py_torch_settings = py_torch_settings
self.tensor_flow_settings = tensor_flow_settings
self.caffe_settings = caffe_settings
self.caffe2_settings = caffe2_settings
self.chainer_settings = chainer_settings
self.custom_toolkit_settings = custom_toolkit_settings
self.custom_mpi_settings = custom_mpi_settings
self.horovod_settings = horovod_settings
self.job_preparation = job_preparation
self.std_out_err_path_prefix = std_out_err_path_prefix
self.input_directories = input_directories
self.output_directories = output_directories
self.environment_variables = environment_variables
self.secrets = secrets
self.constraints = constraints
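# Illustrative usage sketch (not part of the generated code): building creation
# parameters for a two-node custom-toolkit job. All resource IDs, paths, and
# commands below are hypothetical placeholders, and CustomToolkitSettings is
# assumed to accept a command_line argument (as the Batch AI REST API suggests).
# Wrapped in a helper so classes defined later in this module resolve at call time.
def _example_job_create_parameters() -> "JobCreateParameters":
    return JobCreateParameters(
        cluster=ResourceId(id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.BatchAI/workspaces/<ws>/clusters/<cluster>"),
        node_count=2,
        custom_toolkit_settings=CustomToolkitSettings(command_line="python train.py"),
        job_preparation=JobPreparation(command_line="pip install -r requirements.txt"),
        std_out_err_path_prefix="$AZ_BATCHAI_MOUNT_ROOT/nfs",
        output_directories=[OutputDirectory(id="MODEL", path_prefix="$AZ_BATCHAI_MOUNT_ROOT/nfs")],
        constraints=JobBasePropertiesConstraints(max_wall_clock_time=datetime.timedelta(hours=4)),
    )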
[docs]class JobListResult(msrest.serialization.Model):
"""Values returned by the List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The collection of jobs.
:vartype value: list[~batch_ai.models.Job]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Job]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(JobListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
[docs]class JobPreparation(msrest.serialization.Model):
"""Job preparation settings.
All required parameters must be populated in order to send to Azure.
:param command_line: Required. The command line to execute. If containerSettings is specified
 on the job, this commandLine will be executed in the same container as the job. Otherwise it
 will be executed on the node.
:type command_line: str
"""
_validation = {
'command_line': {'required': True},
}
_attribute_map = {
'command_line': {'key': 'commandLine', 'type': 'str'},
}
def __init__(
self,
*,
command_line: str,
**kwargs
):
super(JobPreparation, self).__init__(**kwargs)
self.command_line = command_line
[docs]class JobPropertiesConstraints(msrest.serialization.Model):
"""Constraints associated with the Job.
:param max_wall_clock_time: Max time the job can run. Default value: 1 week.
:type max_wall_clock_time: ~datetime.timedelta
"""
_attribute_map = {
'max_wall_clock_time': {'key': 'maxWallClockTime', 'type': 'duration'},
}
def __init__(
self,
*,
max_wall_clock_time: Optional[datetime.timedelta] = datetime.timedelta(days=7),
**kwargs
):
super(JobPropertiesConstraints, self).__init__(**kwargs)
self.max_wall_clock_time = max_wall_clock_time
[docs]class JobPropertiesExecutionInfo(msrest.serialization.Model):
"""Information about the execution of a job.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar start_time: The time at which the job started running. 'Running' corresponds to the
 running state. If the job has been restarted or retried, this is the most recent time at which
 the job started running. This property is present only for jobs that are in the running or
 completed state.
:vartype start_time: ~datetime.datetime
:ivar end_time: The time at which the job completed. This property is only returned if the job
 is in the completed state.
:vartype end_time: ~datetime.datetime
:ivar exit_code: The exit code of the job. This property is only returned if the job is in the
 completed state.
:vartype exit_code: int
:ivar errors: A collection of errors encountered by the service during job execution.
:vartype errors: list[~batch_ai.models.BatchAIError]
"""
_validation = {
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'exit_code': {'readonly': True},
'errors': {'readonly': True},
}
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'errors': {'key': 'errors', 'type': '[BatchAIError]'},
}
def __init__(
self,
**kwargs
):
super(JobPropertiesExecutionInfo, self).__init__(**kwargs)
self.start_time = None
self.end_time = None
self.exit_code = None
self.errors = None
[docs]class JobsListByExperimentOptions(msrest.serialization.Model):
"""Parameter group.
:param max_results: The maximum number of items to return in the response. A maximum of 1000
 jobs can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(JobsListByExperimentOptions, self).__init__(**kwargs)
self.max_results = max_results
[docs]class JobsListOutputFilesOptions(msrest.serialization.Model):
"""Parameter group.
All required parameters must be populated in order to send to Azure.
:param outputdirectoryid: Required. Id of the job output directory. This is the id attribute of
 the OutputDirectory specified by the user during Create Job.
:type outputdirectoryid: str
:param directory: The path to the directory.
:type directory: str
:param linkexpiryinminutes: The number of minutes after which the download link will expire.
:type linkexpiryinminutes: int
:param max_results: The maximum number of items to return in the response. A maximum of 1000
files can be returned.
:type max_results: int
"""
_validation = {
'outputdirectoryid': {'required': True},
'linkexpiryinminutes': {'maximum': 600, 'minimum': 5},
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'outputdirectoryid': {'key': 'outputdirectoryid', 'type': 'str'},
'directory': {'key': 'directory', 'type': 'str'},
'linkexpiryinminutes': {'key': 'linkexpiryinminutes', 'type': 'int'},
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
outputdirectoryid: str,
directory: Optional[str] = ".",
linkexpiryinminutes: Optional[int] = 60,
max_results: Optional[int] = 1000,
**kwargs
):
super(JobsListOutputFilesOptions, self).__init__(**kwargs)
self.outputdirectoryid = outputdirectoryid
self.directory = directory
self.linkexpiryinminutes = linkexpiryinminutes
self.max_results = max_results
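# Illustrative sketch (not part of the generated code): a parameter group for
# listing files from a "MODEL" output directory declared at job creation. The
# directory and expiry values are hypothetical and stay within the documented
# validation ranges (linkexpiryinminutes 5-600, max_results 1-1000).
def _example_list_output_files_options() -> "JobsListOutputFilesOptions":
    return JobsListOutputFilesOptions(
        outputdirectoryid="MODEL",   # must match an OutputDirectory.id from Create Job
        directory="checkpoints",     # subdirectory within the output directory
        linkexpiryinminutes=120,     # download links valid for two hours
        max_results=100,
    )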
[docs]class KeyVaultSecretReference(msrest.serialization.Model):
"""Key Vault Secret reference.
All required parameters must be populated in order to send to Azure.
:param source_vault: Required. Fully qualified resource identifier of the Key Vault.
:type source_vault: ~batch_ai.models.ResourceId
:param secret_url: Required. The URL referencing a secret in the Key Vault.
:type secret_url: str
"""
_validation = {
'source_vault': {'required': True},
'secret_url': {'required': True},
}
_attribute_map = {
'source_vault': {'key': 'sourceVault', 'type': 'ResourceId'},
'secret_url': {'key': 'secretUrl', 'type': 'str'},
}
def __init__(
self,
*,
source_vault: "ResourceId",
secret_url: str,
**kwargs
):
super(KeyVaultSecretReference, self).__init__(**kwargs)
self.source_vault = source_vault
self.secret_url = secret_url
[docs]class ListUsagesResult(msrest.serialization.Model):
"""The List Usages operation response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The list of compute resource usages.
:vartype value: list[~batch_ai.models.Usage]
:ivar next_link: The URI to fetch the next page of compute resource usage information. Call
ListNext() with this to fetch the next page of compute resource usage information.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Usage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListUsagesResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
[docs]class ManualScaleSettings(msrest.serialization.Model):
"""Manual scale settings for the cluster.
All required parameters must be populated in order to send to Azure.
:param target_node_count: Required. The desired number of compute nodes in the Cluster. Default
is 0.
:type target_node_count: int
:param node_deallocation_option: An action to be performed when the cluster size is decreasing.
The default value is requeue. Possible values include: "requeue", "terminate",
"waitforjobcompletion". Default value: "requeue".
:type node_deallocation_option: str or ~batch_ai.models.DeallocationOption
"""
_validation = {
'target_node_count': {'required': True},
}
_attribute_map = {
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
'node_deallocation_option': {'key': 'nodeDeallocationOption', 'type': 'str'},
}
def __init__(
self,
*,
target_node_count: int,
node_deallocation_option: Optional[Union[str, "DeallocationOption"]] = "requeue",
**kwargs
):
super(ManualScaleSettings, self).__init__(**kwargs)
self.target_node_count = target_node_count
self.node_deallocation_option = node_deallocation_option
[docs]class MountSettings(msrest.serialization.Model):
"""File Server mount Information.
:param mount_point: Path where the data disks are mounted on the File Server.
:type mount_point: str
:param file_server_public_ip: Public IP address of the File Server which can be used to SSH to
the node from outside of the subnet.
:type file_server_public_ip: str
:param file_server_internal_ip: Internal IP address of the File Server which can be used to
access the File Server from within the subnet.
:type file_server_internal_ip: str
"""
_attribute_map = {
'mount_point': {'key': 'mountPoint', 'type': 'str'},
'file_server_public_ip': {'key': 'fileServerPublicIP', 'type': 'str'},
'file_server_internal_ip': {'key': 'fileServerInternalIP', 'type': 'str'},
}
def __init__(
self,
*,
mount_point: Optional[str] = None,
file_server_public_ip: Optional[str] = None,
file_server_internal_ip: Optional[str] = None,
**kwargs
):
super(MountSettings, self).__init__(**kwargs)
self.mount_point = mount_point
self.file_server_public_ip = file_server_public_ip
self.file_server_internal_ip = file_server_internal_ip
[docs]class MountVolumes(msrest.serialization.Model):
"""Details of volumes to mount on the cluster.
:param azure_file_shares: A collection of Azure File Shares that are to be mounted to the
cluster nodes.
:type azure_file_shares: list[~batch_ai.models.AzureFileShareReference]
:param azure_blob_file_systems: A collection of Azure Blob Containers that are to be mounted to
the cluster nodes.
:type azure_blob_file_systems: list[~batch_ai.models.AzureBlobFileSystemReference]
:param file_servers: A collection of Batch AI File Servers that are to be mounted to the
cluster nodes.
:type file_servers: list[~batch_ai.models.FileServerReference]
:param unmanaged_file_systems: A collection of unmanaged file systems that are to be mounted to
the cluster nodes.
:type unmanaged_file_systems: list[~batch_ai.models.UnmanagedFileSystemReference]
"""
_attribute_map = {
'azure_file_shares': {'key': 'azureFileShares', 'type': '[AzureFileShareReference]'},
'azure_blob_file_systems': {'key': 'azureBlobFileSystems', 'type': '[AzureBlobFileSystemReference]'},
'file_servers': {'key': 'fileServers', 'type': '[FileServerReference]'},
'unmanaged_file_systems': {'key': 'unmanagedFileSystems', 'type': '[UnmanagedFileSystemReference]'},
}
def __init__(
self,
*,
azure_file_shares: Optional[List["AzureFileShareReference"]] = None,
azure_blob_file_systems: Optional[List["AzureBlobFileSystemReference"]] = None,
file_servers: Optional[List["FileServerReference"]] = None,
unmanaged_file_systems: Optional[List["UnmanagedFileSystemReference"]] = None,
**kwargs
):
super(MountVolumes, self).__init__(**kwargs)
self.azure_file_shares = azure_file_shares
self.azure_blob_file_systems = azure_blob_file_systems
self.file_servers = file_servers
self.unmanaged_file_systems = unmanaged_file_systems
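# Illustrative sketch (not part of the generated code): mounting an arbitrary
# NFS share as an unmanaged file system. The server address and mount options
# are hypothetical; Batch AI appends the mount path to the command on its own.
# Wrapped in a helper so UnmanagedFileSystemReference resolves at call time.
def _example_mount_volumes() -> "MountVolumes":
    return MountVolumes(
        unmanaged_file_systems=[
            UnmanagedFileSystemReference(
                mount_command="mount -t nfs 10.0.0.4:/exports/data -o vers=3",
                relative_mount_path="data",  # mounted under $AZ_BATCHAI_MOUNT_ROOT/data
            )
        ]
    )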
[docs]class NameValuePair(msrest.serialization.Model):
"""Name-value pair.
:param name: The name in the name-value pair.
:type name: str
:param value: The value in the name-value pair.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(NameValuePair, self).__init__(**kwargs)
self.name = name
self.value = value
[docs]class NodeSetup(msrest.serialization.Model):
"""Node setup settings.
:param setup_task: Setup task to run on cluster nodes when nodes are created or rebooted. The
 setup task code needs to be idempotent. Generally the setup task is used to download static
 data that is required for all jobs that run on the cluster VMs and/or to download/install
 software.
:type setup_task: ~batch_ai.models.SetupTask
:param mount_volumes: Mount volumes to be available to the setup task and all jobs executing on
 the cluster. The volumes will be mounted at the location specified by the
 $AZ_BATCHAI_MOUNT_ROOT environment variable.
:type mount_volumes: ~batch_ai.models.MountVolumes
:param performance_counters_settings: Settings for collecting and uploading performance
 counters.
:type performance_counters_settings: ~batch_ai.models.PerformanceCountersSettings
"""
_attribute_map = {
'setup_task': {'key': 'setupTask', 'type': 'SetupTask'},
'mount_volumes': {'key': 'mountVolumes', 'type': 'MountVolumes'},
'performance_counters_settings': {'key': 'performanceCountersSettings', 'type': 'PerformanceCountersSettings'},
}
def __init__(
self,
*,
setup_task: Optional["SetupTask"] = None,
mount_volumes: Optional["MountVolumes"] = None,
performance_counters_settings: Optional["PerformanceCountersSettings"] = None,
**kwargs
):
super(NodeSetup, self).__init__(**kwargs)
self.setup_task = setup_task
self.mount_volumes = mount_volumes
self.performance_counters_settings = performance_counters_settings
[docs]class NodeStateCounts(msrest.serialization.Model):
"""Counts of various compute node states on the cluster.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar idle_node_count: Number of compute nodes in idle state.
:vartype idle_node_count: int
:ivar running_node_count: Number of compute nodes which are running jobs.
:vartype running_node_count: int
:ivar preparing_node_count: Number of compute nodes which are being prepared.
:vartype preparing_node_count: int
:ivar unusable_node_count: Number of compute nodes which are in unusable state.
:vartype unusable_node_count: int
:ivar leaving_node_count: Number of compute nodes which are leaving the cluster.
:vartype leaving_node_count: int
"""
_validation = {
'idle_node_count': {'readonly': True},
'running_node_count': {'readonly': True},
'preparing_node_count': {'readonly': True},
'unusable_node_count': {'readonly': True},
'leaving_node_count': {'readonly': True},
}
_attribute_map = {
'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(NodeStateCounts, self).__init__(**kwargs)
self.idle_node_count = None
self.running_node_count = None
self.preparing_node_count = None
self.unusable_node_count = None
self.leaving_node_count = None
[docs]class Operation(msrest.serialization.Model):
"""Details of a REST API operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: The name of the operation, in the format {provider}/{resource}/{operation}.
:vartype name: str
:param display: The object that describes the operation.
:type display: ~batch_ai.models.OperationDisplay
:ivar origin: The intended executor of the operation.
:vartype origin: str
:param properties: Any object.
:type properties: any
"""
_validation = {
'name': {'readonly': True},
'origin': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
}
def __init__(
self,
*,
display: Optional["OperationDisplay"] = None,
properties: Optional[Any] = None,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = display
self.origin = None
self.properties = properties
[docs]class OperationDisplay(msrest.serialization.Model):
"""The object that describes the operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provider: Friendly name of the resource provider.
:vartype provider: str
:ivar operation: The operation name. For example: read, write, delete, or listKeys/action.
:vartype operation: str
:ivar resource: The resource type on which the operation is performed.
:vartype resource: str
:ivar description: The friendly name of the operation.
:vartype description: str
"""
_validation = {
'provider': {'readonly': True},
'operation': {'readonly': True},
'resource': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = None
self.operation = None
self.resource = None
self.description = None
[docs]class OperationListResult(msrest.serialization.Model):
"""Contains the list of all operations supported by BatchAI resource provider.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The list of operations supported by the resource provider.
:vartype value: list[~batch_ai.models.Operation]
:ivar next_link: The URL to get the next set of operation list results if there are any.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
[docs]class OutputDirectory(msrest.serialization.Model):
"""Output directory for the job.
All required parameters must be populated in order to send to Azure.
:param id: Required. The ID of the output directory. The job can use the
 AZ_BATCHAI_OUTPUT_<id> environment variable to find the directory path, where <id> is the
 value of the id attribute.
:type id: str
:param path_prefix: Required. The prefix path where the output directory will be created. Note,
 this is an absolute path prefix. E.g. $AZ_BATCHAI_MOUNT_ROOT/MyNFS/MyLogs. The full path to
 the output directory is constructed by combining pathPrefix, jobOutputDirectoryPathSegment
 (reported by get job) and pathSuffix.
:type path_prefix: str
:param path_suffix: The suffix path where the output directory will be created. E.g. models.
You can find the full path to the output directory by combining pathPrefix,
jobOutputDirectoryPathSegment (reported by get job) and pathSuffix.
:type path_suffix: str
"""
_validation = {
'id': {'required': True},
'path_prefix': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'path_prefix': {'key': 'pathPrefix', 'type': 'str'},
'path_suffix': {'key': 'pathSuffix', 'type': 'str'},
}
def __init__(
self,
*,
id: str,
path_prefix: str,
path_suffix: Optional[str] = None,
**kwargs
):
super(OutputDirectory, self).__init__(**kwargs)
self.id = id
self.path_prefix = path_prefix
self.path_suffix = path_suffix
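# Illustrative sketch (not part of the generated code): an output directory
# whose full path combines pathPrefix, the server-generated
# jobOutputDirectoryPathSegment, and pathSuffix. The paths are hypothetical.
def _example_output_directory() -> "OutputDirectory":
    return OutputDirectory(
        id="LOGS",                                   # exposed as AZ_BATCHAI_OUTPUT_LOGS
        path_prefix="$AZ_BATCHAI_MOUNT_ROOT/MyNFS",  # absolute prefix on a mounted share
        path_suffix="logs",
    )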
[docs]class PrivateRegistryCredentials(msrest.serialization.Model):
"""Credentials to access a container image in a private repository.
All required parameters must be populated in order to send to Azure.
:param username: Required. User name to log in to the repository.
:type username: str
:param password: User password to log in to the Docker repository. One of password or
 passwordSecretReference must be specified.
:type password: str
:param password_secret_reference: KeyVault Secret storing the password. Users can store their
 secrets in Azure KeyVault and pass them to the Batch AI service to integrate with KeyVault.
 One of password or passwordSecretReference must be specified.
:type password_secret_reference: ~batch_ai.models.KeyVaultSecretReference
"""
_validation = {
'username': {'required': True},
}
_attribute_map = {
'username': {'key': 'username', 'type': 'str'},
'password': {'key': 'password', 'type': 'str'},
'password_secret_reference': {'key': 'passwordSecretReference', 'type': 'KeyVaultSecretReference'},
}
def __init__(
self,
*,
username: str,
password: Optional[str] = None,
password_secret_reference: Optional["KeyVaultSecretReference"] = None,
**kwargs
):
super(PrivateRegistryCredentials, self).__init__(**kwargs)
self.username = username
self.password = password
self.password_secret_reference = password_secret_reference
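# Illustrative sketch (not part of the generated code): referencing the
# registry password from Key Vault instead of passing it in clear text. The
# vault resource ID and secret URL are hypothetical placeholders.
def _example_registry_credentials() -> "PrivateRegistryCredentials":
    return PrivateRegistryCredentials(
        username="registryuser",
        password_secret_reference=KeyVaultSecretReference(
            source_vault=ResourceId(id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault>"),
            secret_url="https://<vault>.vault.azure.net/secrets/registry-password",
        ),
    )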
[docs]class PyTorchSettings(msrest.serialization.Model):
"""pyTorch job settings.
All required parameters must be populated in order to send to Azure.
:param python_script_file_path: Required. The python script to execute.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter.
:type python_interpreter_path: str
:param command_line_args: Command line arguments that need to be passed to the python script.
:type command_line_args: str
:param process_count: Number of processes to launch for the job execution. The default value
 for this property is equal to the nodeCount property.
:type process_count: int
:param communication_backend: Type of the communication backend for distributed jobs. Valid
values are 'TCP', 'Gloo' or 'MPI'. Not required for non-distributed jobs.
:type communication_backend: str
"""
_validation = {
'python_script_file_path': {'required': True},
}
_attribute_map = {
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'command_line_args': {'key': 'commandLineArgs', 'type': 'str'},
'process_count': {'key': 'processCount', 'type': 'int'},
'communication_backend': {'key': 'communicationBackend', 'type': 'str'},
}
def __init__(
self,
*,
python_script_file_path: str,
python_interpreter_path: Optional[str] = None,
command_line_args: Optional[str] = None,
process_count: Optional[int] = None,
communication_backend: Optional[str] = None,
**kwargs
):
super(PyTorchSettings, self).__init__(**kwargs)
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.command_line_args = command_line_args
self.process_count = process_count
self.communication_backend = communication_backend
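# Illustrative sketch (not part of the generated code): settings for a
# distributed pyTorch job using the Gloo backend. The script path and command
# line arguments are hypothetical placeholders.
def _example_pytorch_settings() -> "PyTorchSettings":
    return PyTorchSettings(
        python_script_file_path="$AZ_BATCHAI_MOUNT_ROOT/nfs/train.py",
        command_line_args="--epochs 10 --batch-size 64",
        process_count=2,               # defaults to nodeCount when omitted
        communication_backend="Gloo",  # 'TCP', 'Gloo' or 'MPI' for distributed jobs
    )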
[docs]class Resource(msrest.serialization.Model):
"""A definition of an Azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = None
self.tags = None
[docs]class ResourceId(msrest.serialization.Model):
"""Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
All required parameters must be populated in order to send to Azure.
:param id: Required. The ID of the resource.
:type id: str
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: str,
**kwargs
):
super(ResourceId, self).__init__(**kwargs)
self.id = id
[docs]class ScaleSettings(msrest.serialization.Model):
"""At least one of manual or autoScale settings must be specified. Only one of manual or autoScale settings can be specified. If autoScale settings are specified, the system automatically scales the cluster up and down (within the supplied limits) based on the pending jobs on the cluster.
:param manual: Manual scale settings for the cluster.
:type manual: ~batch_ai.models.ManualScaleSettings
:param auto_scale: Auto-scale settings for the cluster.
:type auto_scale: ~batch_ai.models.AutoScaleSettings
"""
_attribute_map = {
'manual': {'key': 'manual', 'type': 'ManualScaleSettings'},
'auto_scale': {'key': 'autoScale', 'type': 'AutoScaleSettings'},
}
def __init__(
self,
*,
manual: Optional["ManualScaleSettings"] = None,
auto_scale: Optional["AutoScaleSettings"] = None,
**kwargs
):
super(ScaleSettings, self).__init__(**kwargs)
self.manual = manual
self.auto_scale = auto_scale
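# Illustrative sketch (not part of the generated code): the two mutually
# exclusive ways to size a cluster. Exactly one of manual or auto_scale may be
# set on a single ScaleSettings instance; the node counts are hypothetical.
def _example_scale_settings(use_auto_scale: bool) -> "ScaleSettings":
    if use_auto_scale:
        # Scale between 0 and 4 nodes based on pending jobs.
        return ScaleSettings(
            auto_scale=AutoScaleSettings(minimum_node_count=0, maximum_node_count=4)
        )
    # Keep a fixed pool of 2 nodes, requeueing running jobs when scaling down.
    return ScaleSettings(
        manual=ManualScaleSettings(target_node_count=2, node_deallocation_option="requeue")
    )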
[docs]class SetupTask(msrest.serialization.Model):
"""Specifies a setup task which can be used to customize the compute nodes of the cluster.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param command_line: Required. The command line to be executed on each of the cluster's nodes
 after it is allocated or rebooted. The command is executed in a bash subshell as root.
:type command_line: str
:param environment_variables: A collection of user defined environment variables to be set for
 the setup task.
:type environment_variables: list[~batch_ai.models.EnvironmentVariable]
:param secrets: A collection of user defined environment variables with secret values to be set
 for the setup task. The server will never report the values of these variables back.
:type secrets: list[~batch_ai.models.EnvironmentVariableWithSecretValue]
:param std_out_err_path_prefix: Required. The prefix of a path where the Batch AI service will
upload the stdout, stderr and execution log of the setup task.
:type std_out_err_path_prefix: str
:ivar std_out_err_path_suffix: A path segment appended by Batch AI to stdOutErrPathPrefix to
 form a path where stdout, stderr and execution log of the setup task will be uploaded. Batch
 AI creates the setup task output directories under a unique path to avoid conflicts between
 different clusters. The full path can be obtained by concatenating stdOutErrPathPrefix and
 stdOutErrPathSuffix.
:vartype std_out_err_path_suffix: str
"""
_validation = {
'command_line': {'required': True},
'std_out_err_path_prefix': {'required': True},
'std_out_err_path_suffix': {'readonly': True},
}
_attribute_map = {
'command_line': {'key': 'commandLine', 'type': 'str'},
'environment_variables': {'key': 'environmentVariables', 'type': '[EnvironmentVariable]'},
'secrets': {'key': 'secrets', 'type': '[EnvironmentVariableWithSecretValue]'},
'std_out_err_path_prefix': {'key': 'stdOutErrPathPrefix', 'type': 'str'},
'std_out_err_path_suffix': {'key': 'stdOutErrPathSuffix', 'type': 'str'},
}
def __init__(
self,
*,
command_line: str,
std_out_err_path_prefix: str,
environment_variables: Optional[List["EnvironmentVariable"]] = None,
secrets: Optional[List["EnvironmentVariableWithSecretValue"]] = None,
**kwargs
):
super(SetupTask, self).__init__(**kwargs)
self.command_line = command_line
self.environment_variables = environment_variables
self.secrets = secrets
self.std_out_err_path_prefix = std_out_err_path_prefix
self.std_out_err_path_suffix = None
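# Illustrative sketch (not part of the generated code): an idempotent setup
# task that installs a package on each node; the command runs in a bash
# subshell as root. The log path is a hypothetical mounted file share.
def _example_setup_task() -> "SetupTask":
    return SetupTask(
        command_line="apt-get update && apt-get install -y unzip",
        std_out_err_path_prefix="$AZ_BATCHAI_MOUNT_ROOT/afs",  # task logs uploaded here
    )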
[docs]class SshConfiguration(msrest.serialization.Model):
"""SSH configuration.
All required parameters must be populated in order to send to Azure.
:param public_ips_to_allow: List of source IP ranges to allow SSH connection from. The default
 value is '*' (all source IPs are allowed). The maximum number of IP ranges that can be
 specified is 400.
:type public_ips_to_allow: list[str]
:param user_account_settings: Required. Settings for administrator user account to be created
on a node. The account can be used to establish SSH connection to the node.
:type user_account_settings: ~batch_ai.models.UserAccountSettings
"""
_validation = {
'user_account_settings': {'required': True},
}
_attribute_map = {
'public_ips_to_allow': {'key': 'publicIPsToAllow', 'type': '[str]'},
'user_account_settings': {'key': 'userAccountSettings', 'type': 'UserAccountSettings'},
}
def __init__(
self,
*,
user_account_settings: "UserAccountSettings",
public_ips_to_allow: Optional[List[str]] = None,
**kwargs
):
super(SshConfiguration, self).__init__(**kwargs)
self.public_ips_to_allow = public_ips_to_allow
self.user_account_settings = user_account_settings
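# Illustrative sketch (not part of the generated code): SSH access restricted
# to a single hypothetical source range, authenticating with a public key.
# Wrapped in a helper so UserAccountSettings resolves at call time.
def _example_ssh_configuration() -> "SshConfiguration":
    return SshConfiguration(
        user_account_settings=UserAccountSettings(
            admin_user_name="demo-admin",
            admin_user_ssh_public_key="ssh-rsa AAAA... demo",  # hypothetical key
        ),
        public_ips_to_allow=["203.0.113.0/24"],  # at most 400 ranges may be listed
    )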
[docs]class TensorFlowSettings(msrest.serialization.Model):
"""TensorFlow job settings.
All required parameters must be populated in order to send to Azure.
:param python_script_file_path: Required. The python script to execute.
:type python_script_file_path: str
:param python_interpreter_path: The path to the Python interpreter.
:type python_interpreter_path: str
:param master_command_line_args: Command line arguments that need to be passed to the python
script for the master task.
:type master_command_line_args: str
:param worker_command_line_args: Command line arguments that need to be passed to the python
script for the worker task. Optional for single process jobs.
:type worker_command_line_args: str
:param parameter_server_command_line_args: Command line arguments that need to be passed to the
python script for the parameter server. Optional for single process jobs.
:type parameter_server_command_line_args: str
:param worker_count: The number of worker tasks. If specified, the value must be less than or
equal to (nodeCount * numberOfGPUs per VM). If not specified, the default value is equal to
nodeCount. This property can be specified only for distributed TensorFlow training.
:type worker_count: int
:param parameter_server_count: The number of parameter server tasks. If specified, the value
must be less than or equal to nodeCount. If not specified, the default value is equal to 1 for
distributed TensorFlow training. This property can be specified only for distributed TensorFlow
training.
:type parameter_server_count: int
"""
_validation = {
'python_script_file_path': {'required': True},
}
_attribute_map = {
'python_script_file_path': {'key': 'pythonScriptFilePath', 'type': 'str'},
'python_interpreter_path': {'key': 'pythonInterpreterPath', 'type': 'str'},
'master_command_line_args': {'key': 'masterCommandLineArgs', 'type': 'str'},
'worker_command_line_args': {'key': 'workerCommandLineArgs', 'type': 'str'},
'parameter_server_command_line_args': {'key': 'parameterServerCommandLineArgs', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
}
def __init__(
self,
*,
python_script_file_path: str,
python_interpreter_path: Optional[str] = None,
master_command_line_args: Optional[str] = None,
worker_command_line_args: Optional[str] = None,
parameter_server_command_line_args: Optional[str] = None,
worker_count: Optional[int] = None,
parameter_server_count: Optional[int] = None,
**kwargs
):
super(TensorFlowSettings, self).__init__(**kwargs)
self.python_script_file_path = python_script_file_path
self.python_interpreter_path = python_interpreter_path
self.master_command_line_args = master_command_line_args
self.worker_command_line_args = worker_command_line_args
self.parameter_server_command_line_args = parameter_server_command_line_args
self.worker_count = worker_count
self.parameter_server_count = parameter_server_count
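# Illustrative sketch (not part of the generated code): distributed TensorFlow
# settings with separate master, worker, and parameter-server roles. The counts
# respect the documented bounds (workerCount <= nodeCount * GPUs per VM,
# parameterServerCount <= nodeCount); all values are hypothetical.
def _example_tensorflow_settings() -> "TensorFlowSettings":
    return TensorFlowSettings(
        python_script_file_path="$AZ_BATCHAI_MOUNT_ROOT/nfs/mnist.py",
        master_command_line_args="--job_name=master",
        worker_command_line_args="--job_name=worker",
        parameter_server_command_line_args="--job_name=ps",
        worker_count=2,
        parameter_server_count=1,
    )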
[docs]class UnmanagedFileSystemReference(msrest.serialization.Model):
"""Unmanaged file system mounting configuration.
All required parameters must be populated in order to send to Azure.
:param mount_command: Required. Mount command line. Note that Batch AI will append the mount
 path to the command on its own.
:type mount_command: str
:param relative_mount_path: Required. The relative path on the compute node where the unmanaged
file system will be mounted. Note that all cluster level unmanaged file systems will be mounted
under $AZ_BATCHAI_MOUNT_ROOT location and all job level unmanaged file systems will be mounted
under $AZ_BATCHAI_JOB_MOUNT_ROOT.
:type relative_mount_path: str
"""
_validation = {
'mount_command': {'required': True},
'relative_mount_path': {'required': True},
}
_attribute_map = {
'mount_command': {'key': 'mountCommand', 'type': 'str'},
'relative_mount_path': {'key': 'relativeMountPath', 'type': 'str'},
}
def __init__(
self,
*,
mount_command: str,
relative_mount_path: str,
**kwargs
):
super(UnmanagedFileSystemReference, self).__init__(**kwargs)
self.mount_command = mount_command
self.relative_mount_path = relative_mount_path
[docs]class Usage(msrest.serialization.Model):
"""Describes Batch AI Resource Usage.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
:vartype unit: str or ~batch_ai.models.UsageUnit
:ivar current_value: The current usage of the resource.
:vartype current_value: int
:ivar limit: The maximum permitted usage of the resource.
:vartype limit: long
:ivar name: The name of the type of usage.
:vartype name: ~batch_ai.models.UsageName
"""
_validation = {
'unit': {'readonly': True},
'current_value': {'readonly': True},
'limit': {'readonly': True},
'name': {'readonly': True},
}
_attribute_map = {
'unit': {'key': 'unit', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'int'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(
self,
**kwargs
):
super(Usage, self).__init__(**kwargs)
self.unit = None
self.current_value = None
self.limit = None
self.name = None
[docs]class UsageName(msrest.serialization.Model):
"""The Usage Names.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The name of the resource.
:vartype value: str
:ivar localized_value: The localized name of the resource.
:vartype localized_value: str
"""
_validation = {
'value': {'readonly': True},
'localized_value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsageName, self).__init__(**kwargs)
self.value = None
self.localized_value = None
[docs]class UserAccountSettings(msrest.serialization.Model):
"""Settings for user account that gets created on each on the nodes of a cluster.
All required parameters must be populated in order to send to Azure.
:param admin_user_name: Required. Name of the administrator user account which can be used to
SSH to nodes.
:type admin_user_name: str
:param admin_user_ssh_public_key: SSH public key of the administrator user account.
:type admin_user_ssh_public_key: str
:param admin_user_password: Password of the administrator user account.
:type admin_user_password: str
"""
_validation = {
'admin_user_name': {'required': True},
}
_attribute_map = {
'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
}
def __init__(
self,
*,
admin_user_name: str,
admin_user_ssh_public_key: Optional[str] = None,
admin_user_password: Optional[str] = None,
**kwargs
):
super(UserAccountSettings, self).__init__(**kwargs)
self.admin_user_name = admin_user_name
self.admin_user_ssh_public_key = admin_user_ssh_public_key
self.admin_user_password = admin_user_password
[docs]class VirtualMachineConfiguration(msrest.serialization.Model):
"""VM configuration.
:param image_reference: OS image reference for cluster nodes.
:type image_reference: ~batch_ai.models.ImageReference
"""
_attribute_map = {
'image_reference': {'key': 'imageReference', 'type': 'ImageReference'},
}
def __init__(
self,
*,
image_reference: Optional["ImageReference"] = None,
**kwargs
):
super(VirtualMachineConfiguration, self).__init__(**kwargs)
self.image_reference = image_reference
[docs]class Workspace(Resource):
"""Batch AI Workspace information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource.
:vartype location: str
:ivar tags: A set of tags. The tags of the resource.
:vartype tags: dict[str, str]
:ivar creation_time: Time when the Workspace was created.
:vartype creation_time: ~datetime.datetime
:ivar provisioning_state: The provisioning state of the Workspace. Possible values include:
"creating", "succeeded", "failed", "deleting".
:vartype provisioning_state: str or ~batch_ai.models.ProvisioningState
:ivar provisioning_state_transition_time: The time at which the workspace entered its current
provisioning state.
:vartype provisioning_state_transition_time: ~datetime.datetime
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'readonly': True},
'tags': {'readonly': True},
'creation_time': {'readonly': True},
'provisioning_state': {'readonly': True},
'provisioning_state_transition_time': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'provisioning_state_transition_time': {'key': 'properties.provisioningStateTransitionTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(Workspace, self).__init__(**kwargs)
self.creation_time = None
self.provisioning_state = None
self.provisioning_state_transition_time = None
[docs]class WorkspaceCreateParameters(msrest.serialization.Model):
"""Workspace creation parameters.
All required parameters must be populated in order to send to Azure.
:param location: Required. The region in which to create the Workspace.
:type location: str
:param tags: A set of tags. The user specified tags associated with the Workspace.
:type tags: dict[str, str]
"""
_validation = {
'location': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(WorkspaceCreateParameters, self).__init__(**kwargs)
self.location = location
self.tags = tags
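# Illustrative sketch (not part of the generated code): minimal parameters for
# creating a workspace; the region and tags are hypothetical placeholders.
def _example_workspace_create_parameters() -> "WorkspaceCreateParameters":
    return WorkspaceCreateParameters(
        location="eastus",
        tags={"team": "research", "env": "dev"},
    )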
[docs]class WorkspaceListResult(msrest.serialization.Model):
"""Values returned by the List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: The collection of workspaces.
:vartype value: list[~batch_ai.models.Workspace]
:ivar next_link: The continuation token.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Workspace]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WorkspaceListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
[docs]class WorkspacesListByResourceGroupOptions(msrest.serialization.Model):
"""Parameter group.
:param max_results: The maximum number of items to return in the response. A maximum of 1000
 workspaces can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(WorkspacesListByResourceGroupOptions, self).__init__(**kwargs)
self.max_results = max_results
[docs]class WorkspacesListOptions(msrest.serialization.Model):
"""Parameter group.
:param max_results: The maximum number of items to return in the response. A maximum of 1000
 workspaces can be returned.
:type max_results: int
"""
_validation = {
'max_results': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'max_results': {'key': 'maxResults', 'type': 'int'},
}
def __init__(
self,
*,
max_results: Optional[int] = 1000,
**kwargs
):
super(WorkspacesListOptions, self).__init__(**kwargs)
self.max_results = max_results
[docs]class WorkspaceUpdateParameters(msrest.serialization.Model):
"""Workspace update parameters.
:param tags: A set of tags. The user specified tags associated with the Workspace.
:type tags: dict[str, str]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(WorkspaceUpdateParameters, self).__init__(**kwargs)
self.tags = tags