# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class NuGetClient(Client):
"""NuGet
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(NuGetClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'b3be7473-68ea-4a81-bfc7-9530baaa19ad'
def download_package(self, feed_id, package_name, package_version, project=None, source_protocol_version=None, **kwargs):
"""DownloadPackage.
[Preview API] Download a package version directly.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:param str source_protocol_version: Unused
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
query_parameters = {}
if source_protocol_version is not None:
query_parameters['sourceProtocolVersion'] = self._serialize.query('source_protocol_version', source_protocol_version, 'str')
response = self._send(http_method='GET',
location_id='6ea81b8c-7386-490b-a71f-6cf23c80b388',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
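# Usage sketch (illustrative, not part of the generated client): stream a package
# download to disk. The feed, package, version, and project names are placeholders,
# and the callback signature follows msrest's stream-download convention (chunk plus
# the response); verify it against the msrest version you are using.
#
#   def on_chunk(chunk, response=None):
#       print('received {} bytes'.format(len(chunk)))
#
#   stream = nuget_client.download_package('my-feed', 'My.Package', '1.0.0',
#                                          project='MyProject', callback=on_chunk)
#   with open('My.Package.1.0.0.nupkg', 'wb') as package_file:
#       for chunk in stream:
#           package_file.write(chunk)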
def update_package_versions(self, batch_request, feed_id, project=None):
"""UpdatePackageVersions.
[Preview API] Update several packages from a single feed in a single request. The updates to the packages do not happen atomically.
:param :class:`<NuGetPackagesBatchRequest> <azure.devops.v7_1.nuget.models.NuGetPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data.
:param str feed_id: Name or ID of the feed.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
content = self._serialize.body(batch_request, 'NuGetPackagesBatchRequest')
self._send(http_method='POST',
location_id='00c58ea7-d55f-49de-b59f-983533ae11dc',
version='7.1-preview.1',
route_values=route_values,
content=content)
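# Usage sketch (illustrative, not part of the generated client): batch-delete two
# versions of a package from a feed. The field names on NuGetPackagesBatchRequest
# and MinimalPackageDetails, and the 'delete' operation value, are assumptions;
# check azure.devops.v7_1.nuget.models for the exact shapes before relying on this.
#
#   batch = models.NuGetPackagesBatchRequest(
#       operation='delete',
#       packages=[models.MinimalPackageDetails(id='My.Package', version='1.0.0'),
#                 models.MinimalPackageDetails(id='My.Package', version='1.0.1')])
#   nuget_client.update_package_versions(batch, 'my-feed', project='MyProject')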
def update_recycle_bin_package_versions(self, batch_request, feed_id, project=None):
"""UpdateRecycleBinPackageVersions.
[Preview API] Delete or restore several package versions from the recycle bin.
:param :class:`<NuGetPackagesBatchRequest> <azure.devops.v7_1.nuget.models.NuGetPackagesBatchRequest>` batch_request: Information about the packages to update, the operation to perform, and its associated data. The operation must be 'PermanentDelete' or 'RestoreToFeed'.
:param str feed_id: Name or ID of the feed.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
content = self._serialize.body(batch_request, 'NuGetPackagesBatchRequest')
self._send(http_method='POST',
location_id='6479ac16-32f4-40f7-aa96-9414de861352',
version='7.1-preview.1',
route_values=route_values,
content=content)
def delete_package_version_from_recycle_bin(self, feed_id, package_name, package_version, project=None):
"""DeletePackageVersionFromRecycleBin.
[Preview API] Delete a package version from a feed's recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
self._send(http_method='DELETE',
location_id='07e88775-e3cb-4408-bbe1-628e036fac8c',
version='7.1-preview.1',
route_values=route_values)
def get_package_version_metadata_from_recycle_bin(self, feed_id, package_name, package_version, project=None):
"""GetPackageVersionMetadataFromRecycleBin.
[Preview API] View a package version's deletion/recycled status
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:rtype: :class:`<NuGetPackageVersionDeletionState> <azure.devops.v7_1.nuget.models.NuGetPackageVersionDeletionState>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
response = self._send(http_method='GET',
location_id='07e88775-e3cb-4408-bbe1-628e036fac8c',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('NuGetPackageVersionDeletionState', response)
def restore_package_version_from_recycle_bin(self, package_version_details, feed_id, package_name, package_version, project=None):
"""RestorePackageVersionFromRecycleBin.
[Preview API] Restore a package version from a feed's recycle bin back into the active feed.
:param :class:`<NuGetRecycleBinPackageVersionDetails> <azure.devops.v7_1.nuget.models.NuGetRecycleBinPackageVersionDetails>` package_version_details: Set the 'Deleted' member to 'false' to apply the restore operation
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
content = self._serialize.body(package_version_details, 'NuGetRecycleBinPackageVersionDetails')
self._send(http_method='PATCH',
location_id='07e88775-e3cb-4408-bbe1-628e036fac8c',
version='7.1-preview.1',
route_values=route_values,
content=content)
def get_upstreaming_behavior(self, feed_id, package_name, project=None):
"""GetUpstreamingBehavior.
[Preview API] Get the upstreaming behavior of a package within the context of a feed
:param str feed_id: The name or id of the feed
:param str package_name: The name of the package
:param str project: Project ID or project name
:rtype: :class:`<UpstreamingBehavior> <azure.devops.v7_1.nuget.models.UpstreamingBehavior>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
response = self._send(http_method='GET',
location_id='b41eec47-6472-4efa-bcd5-a2c5607b66ec',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('UpstreamingBehavior', response)
def set_upstreaming_behavior(self, feed_id, package_name, behavior, project=None):
"""SetUpstreamingBehavior.
[Preview API] Set the upstreaming behavior of a package within the context of a feed
:param str feed_id: The name or id of the feed
:param str package_name: The name of the package
:param :class:`<UpstreamingBehavior> <azure.devops.v7_1.nuget.models.UpstreamingBehavior>` behavior: The behavior to apply to the package within the scope of the feed
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
content = self._serialize.body(behavior, 'UpstreamingBehavior')
self._send(http_method='PATCH',
location_id='b41eec47-6472-4efa-bcd5-a2c5607b66ec',
version='7.1-preview.1',
route_values=route_values,
content=content)
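# Usage sketch (illustrative, not part of the generated client): read a package's
# upstreaming behavior and then pin it to allow externally sourced versions. The
# attribute name 'versions_from_external_upstreams' and the 'allowExternalVersions'
# value are assumptions; confirm them against azure.devops.v7_1.nuget.models.
#
#   current = nuget_client.get_upstreaming_behavior('my-feed', 'My.Package', project='MyProject')
#   behavior = models.UpstreamingBehavior(versions_from_external_upstreams='allowExternalVersions')
#   nuget_client.set_upstreaming_behavior('my-feed', 'My.Package', behavior, project='MyProject')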
def delete_package_version(self, feed_id, package_name, package_version, project=None):
"""DeletePackageVersion.
[Preview API] Send a package version from the feed to its paired recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package to delete.
:param str package_version: Version of the package to delete.
:param str project: Project ID or project name
:rtype: :class:`<Package> <azure.devops.v7_1.nuget.models.Package>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
response = self._send(http_method='DELETE',
location_id='36c9353b-e250-4c57-b040-513c186c3905',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('Package', response)
def get_package_version(self, feed_id, package_name, package_version, project=None, show_deleted=None):
"""GetPackageVersion.
[Preview API] Get information about a package version.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package.
:param str package_version: Version of the package.
:param str project: Project ID or project name
:param bool show_deleted: True to include deleted packages in the response.
:rtype: :class:`<Package> <azure.devops.v7_1.nuget.models.Package>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
query_parameters = {}
if show_deleted is not None:
query_parameters['showDeleted'] = self._serialize.query('show_deleted', show_deleted, 'bool')
response = self._send(http_method='GET',
location_id='36c9353b-e250-4c57-b040-513c186c3905',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Package', response)
def update_package_version(self, package_version_details, feed_id, package_name, package_version, project=None):
"""UpdatePackageVersion.
[Preview API] Set mutable state on a package version.
:param :class:`<PackageVersionDetails> <azure.devops.v7_1.nuget.models.PackageVersionDetails>` package_version_details: New state to apply to the referenced package.
:param str feed_id: Name or ID of the feed.
:param str package_name: Name of the package to update.
:param str package_version: Version of the package to update.
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
content = self._serialize.body(package_version_details, 'PackageVersionDetails')
self._send(http_method='PATCH',
location_id='36c9353b-e250-4c57-b040-513c186c3905',
version='7.1-preview.1',
route_values=route_values,
content=content)
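# Usage sketch (not part of the generated client): a minimal, hedged example of
# constructing the client directly and downloading a package version. The
# organization URL, personal access token, feed, package, and version below are
# placeholders; in most applications the client is obtained through
# azure.devops.connection.Connection rather than constructed directly.
if __name__ == '__main__':
    from msrest.authentication import BasicAuthentication

    credentials = BasicAuthentication('', 'PERSONAL_ACCESS_TOKEN')
    nuget_client = NuGetClient(base_url='https://pkgs.dev.azure.com/my-organization',
                               creds=credentials)
    # download_package returns a byte-chunk generator (see stream_download above).
    stream = nuget_client.download_package('my-feed', 'My.Package', '1.0.0',
                                           project='MyProject')
    with open('My.Package.1.0.0.nupkg', 'wb') as package_file:
        for chunk in stream:
            package_file.write(chunk)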
# Source file: azure-devops-python-api/azure-devops/azure/devops/v7_1/nuget/nuget_client.py
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class AgentArtifactDefinition(Model):
"""
:param alias: Gets or sets the artifact definition alias.
:type alias: str
:param artifact_type: Gets or sets the artifact type.
:type artifact_type: object
:param details: Gets or sets the artifact definition details.
:type details: str
:param name: Gets or sets the name of artifact definition.
:type name: str
:param version: Gets or sets the version of artifact definition.
:type version: str
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'artifact_type': {'key': 'artifactType', 'type': 'object'},
'details': {'key': 'details', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, alias=None, artifact_type=None, details=None, name=None, version=None):
super(AgentArtifactDefinition, self).__init__()
self.alias = alias
self.artifact_type = artifact_type
self.details = details
self.name = name
self.version = version
class ApprovalOptions(Model):
"""
:param auto_triggered_and_previous_environment_approved_can_be_skipped: Specify whether the approval can be skipped if the same approver approved the previous stage.
:type auto_triggered_and_previous_environment_approved_can_be_skipped: bool
:param enforce_identity_revalidation: Specify whether to revalidate the identity of the approver before completing the approval.
:type enforce_identity_revalidation: bool
:param execution_order: Approvals execution order.
:type execution_order: object
:param release_creator_can_be_approver: Specify whether the user requesting a release or deployment should be allowed to act as an approver.
:type release_creator_can_be_approver: bool
:param required_approver_count: The number of approvals required to move the release forward. '0' means all approvals are required.
:type required_approver_count: int
:param timeout_in_minutes: Approval timeout. The default timeout is 30 days and the maximum allowed timeout is 365 days. '0' means the default timeout, i.e. 30 days.
:type timeout_in_minutes: int
"""
_attribute_map = {
'auto_triggered_and_previous_environment_approved_can_be_skipped': {'key': 'autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped', 'type': 'bool'},
'enforce_identity_revalidation': {'key': 'enforceIdentityRevalidation', 'type': 'bool'},
'execution_order': {'key': 'executionOrder', 'type': 'object'},
'release_creator_can_be_approver': {'key': 'releaseCreatorCanBeApprover', 'type': 'bool'},
'required_approver_count': {'key': 'requiredApproverCount', 'type': 'int'},
'timeout_in_minutes': {'key': 'timeoutInMinutes', 'type': 'int'}
}
def __init__(self, auto_triggered_and_previous_environment_approved_can_be_skipped=None, enforce_identity_revalidation=None, execution_order=None, release_creator_can_be_approver=None, required_approver_count=None, timeout_in_minutes=None):
super(ApprovalOptions, self).__init__()
self.auto_triggered_and_previous_environment_approved_can_be_skipped = auto_triggered_and_previous_environment_approved_can_be_skipped
self.enforce_identity_revalidation = enforce_identity_revalidation
self.execution_order = execution_order
self.release_creator_can_be_approver = release_creator_can_be_approver
self.required_approver_count = required_approver_count
self.timeout_in_minutes = timeout_in_minutes
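# Usage sketch (illustrative only): approval options that require two approvers,
# give approvals seven days to complete, and prevent the release creator from
# approving their own release. All values are placeholders.
#
#   options = ApprovalOptions(
#       release_creator_can_be_approver=False,
#       required_approver_count=2,
#       timeout_in_minutes=7 * 24 * 60)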
class Artifact(Model):
"""
:param alias: Gets or sets alias.
:type alias: str
:param definition_reference: Gets or sets definition reference. e.g. {"project":{"id":"fed755ea-49c5-4399-acea-fd5b5aa90a6c","name":"myProject"},"definition":{"id":"1","name":"mybuildDefinition"},"connection":{"id":"1","name":"myConnection"}}.
:type definition_reference: dict
:param is_primary: Indicates whether artifact is primary or not.
:type is_primary: bool
:param is_retained: Indicates whether artifact is retained by release or not.
:type is_retained: bool
:param source_id:
:type source_id: str
:param type: Gets or sets type. It can have value as 'Build', 'Jenkins', 'GitHub', 'Nuget', 'Team Build (external)', 'ExternalTFSBuild', 'Git', 'TFVC', 'ExternalTfsXamlBuild'.
:type type: str
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'definition_reference': {'key': 'definitionReference', 'type': '{ArtifactSourceReference}'},
'is_primary': {'key': 'isPrimary', 'type': 'bool'},
'is_retained': {'key': 'isRetained', 'type': 'bool'},
'source_id': {'key': 'sourceId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, alias=None, definition_reference=None, is_primary=None, is_retained=None, source_id=None, type=None):
super(Artifact, self).__init__()
self.alias = alias
self.definition_reference = definition_reference
self.is_primary = is_primary
self.is_retained = is_retained
self.source_id = source_id
self.type = type
class ArtifactMetadata(Model):
"""
:param alias: Sets alias of artifact.
:type alias: str
:param instance_reference: Sets the instance reference of the artifact, e.g. for a build artifact it is the build number.
:type instance_reference: :class:`BuildVersion <azure.devops.v7_1.release.models.BuildVersion>`
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'instance_reference': {'key': 'instanceReference', 'type': 'BuildVersion'}
}
def __init__(self, alias=None, instance_reference=None):
super(ArtifactMetadata, self).__init__()
self.alias = alias
self.instance_reference = instance_reference
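# Usage sketch (illustrative only): pin a build-type artifact alias to a specific
# build when creating a release. The alias, build id, and build number are
# placeholders; BuildVersion is defined later in this module.
#
#   artifact_version = ArtifactMetadata(
#       alias='_MyBuildArtifact',
#       instance_reference=BuildVersion(id='12345', name='20240101.1'))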
class ArtifactSourceReference(Model):
"""
:param id: ID of the artifact source.
:type id: str
:param name: Name of the artifact source.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(ArtifactSourceReference, self).__init__()
self.id = id
self.name = name
class ArtifactTriggerConfiguration(Model):
"""
:param is_trigger_supported: Gets or sets whether the trigger is supported.
:type is_trigger_supported: bool
:param is_trigger_supported_only_in_hosted: Gets or sets whether the trigger is supported only in hosted environments.
:type is_trigger_supported_only_in_hosted: bool
:param is_webhook_supported_at_server_level: Gets or sets whether the webhook is supported at the server level.
:type is_webhook_supported_at_server_level: bool
:param payload_hash_header_name: Gets or sets the payload hash header name for the artifact trigger configuration.
:type payload_hash_header_name: str
:param resources: Gets or sets the resources for artifact trigger configuration.
:type resources: dict
:param webhook_payload_mapping: Gets or sets the webhook payload mapping for artifact trigger configuration.
:type webhook_payload_mapping: dict
"""
_attribute_map = {
'is_trigger_supported': {'key': 'isTriggerSupported', 'type': 'bool'},
'is_trigger_supported_only_in_hosted': {'key': 'isTriggerSupportedOnlyInHosted', 'type': 'bool'},
'is_webhook_supported_at_server_level': {'key': 'isWebhookSupportedAtServerLevel', 'type': 'bool'},
'payload_hash_header_name': {'key': 'payloadHashHeaderName', 'type': 'str'},
'resources': {'key': 'resources', 'type': '{str}'},
'webhook_payload_mapping': {'key': 'webhookPayloadMapping', 'type': '{str}'}
}
def __init__(self, is_trigger_supported=None, is_trigger_supported_only_in_hosted=None, is_webhook_supported_at_server_level=None, payload_hash_header_name=None, resources=None, webhook_payload_mapping=None):
super(ArtifactTriggerConfiguration, self).__init__()
self.is_trigger_supported = is_trigger_supported
self.is_trigger_supported_only_in_hosted = is_trigger_supported_only_in_hosted
self.is_webhook_supported_at_server_level = is_webhook_supported_at_server_level
self.payload_hash_header_name = payload_hash_header_name
self.resources = resources
self.webhook_payload_mapping = webhook_payload_mapping
class ArtifactTypeDefinition(Model):
"""
:param artifact_trigger_configuration: Gets or sets the artifact trigger configuration of artifact type definition.
:type artifact_trigger_configuration: :class:`ArtifactTriggerConfiguration <azure.devops.v7_1.release.models.ArtifactTriggerConfiguration>`
:param artifact_type: Gets or sets the artifact type of artifact type definition. Valid values are 'Build', 'Package', 'Source' or 'ContainerImage'.
:type artifact_type: str
:param display_name: Gets or sets the display name of artifact type definition.
:type display_name: str
:param endpoint_type_id: Gets or sets the endpoint type id of artifact type definition.
:type endpoint_type_id: str
:param input_descriptors: Gets or sets the input descriptors of artifact type definition.
:type input_descriptors: list of :class:`InputDescriptor <azure.devops.v7_1.release.models.InputDescriptor>`
:param is_commits_traceability_supported: Gets or sets whether commit traceability is supported for the artifact type definition.
:type is_commits_traceability_supported: bool
:param is_workitems_traceability_supported: Gets or sets whether work item traceability is supported for the artifact type definition.
:type is_workitems_traceability_supported: bool
:param name: Gets or sets the name of artifact type definition.
:type name: str
:param unique_source_identifier: Gets or sets the unique source identifier of artifact type definition.
:type unique_source_identifier: str
"""
_attribute_map = {
'artifact_trigger_configuration': {'key': 'artifactTriggerConfiguration', 'type': 'ArtifactTriggerConfiguration'},
'artifact_type': {'key': 'artifactType', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'endpoint_type_id': {'key': 'endpointTypeId', 'type': 'str'},
'input_descriptors': {'key': 'inputDescriptors', 'type': '[InputDescriptor]'},
'is_commits_traceability_supported': {'key': 'isCommitsTraceabilitySupported', 'type': 'bool'},
'is_workitems_traceability_supported': {'key': 'isWorkitemsTraceabilitySupported', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'unique_source_identifier': {'key': 'uniqueSourceIdentifier', 'type': 'str'}
}
def __init__(self, artifact_trigger_configuration=None, artifact_type=None, display_name=None, endpoint_type_id=None, input_descriptors=None, is_commits_traceability_supported=None, is_workitems_traceability_supported=None, name=None, unique_source_identifier=None):
super(ArtifactTypeDefinition, self).__init__()
self.artifact_trigger_configuration = artifact_trigger_configuration
self.artifact_type = artifact_type
self.display_name = display_name
self.endpoint_type_id = endpoint_type_id
self.input_descriptors = input_descriptors
self.is_commits_traceability_supported = is_commits_traceability_supported
self.is_workitems_traceability_supported = is_workitems_traceability_supported
self.name = name
self.unique_source_identifier = unique_source_identifier
class ArtifactVersion(Model):
"""
:param alias: Gets or sets the alias of artifact.
:type alias: str
:param default_version: Gets or sets the default version of artifact.
:type default_version: :class:`BuildVersion <azure.devops.v7_1.release.models.BuildVersion>`
:param error_message: Gets or sets the error message encountered during querying of versions for artifact.
:type error_message: str
:param source_id:
:type source_id: str
:param versions: Gets or sets the list of build versions of artifact.
:type versions: list of :class:`BuildVersion <azure.devops.v7_1.release.models.BuildVersion>`
"""
_attribute_map = {
'alias': {'key': 'alias', 'type': 'str'},
'default_version': {'key': 'defaultVersion', 'type': 'BuildVersion'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'source_id': {'key': 'sourceId', 'type': 'str'},
'versions': {'key': 'versions', 'type': '[BuildVersion]'}
}
def __init__(self, alias=None, default_version=None, error_message=None, source_id=None, versions=None):
super(ArtifactVersion, self).__init__()
self.alias = alias
self.default_version = default_version
self.error_message = error_message
self.source_id = source_id
self.versions = versions
class ArtifactVersionQueryResult(Model):
"""
:param artifact_versions: Gets or sets the list for artifact versions of artifact version query result.
:type artifact_versions: list of :class:`ArtifactVersion <azure.devops.v7_1.release.models.ArtifactVersion>`
"""
_attribute_map = {
'artifact_versions': {'key': 'artifactVersions', 'type': '[ArtifactVersion]'}
}
def __init__(self, artifact_versions=None):
super(ArtifactVersionQueryResult, self).__init__()
self.artifact_versions = artifact_versions
class AuthorizationHeader(Model):
"""
:param name:
:type name: str
:param value:
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, name=None, value=None):
super(AuthorizationHeader, self).__init__()
self.name = name
self.value = value
class AutoTriggerIssue(Model):
"""
:param issue:
:type issue: :class:`Issue <azure.devops.v7_1.release.models.Issue>`
:param issue_source:
:type issue_source: object
:param project:
:type project: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
:param release_definition_reference:
:type release_definition_reference: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_trigger_type:
:type release_trigger_type: object
"""
_attribute_map = {
'issue': {'key': 'issue', 'type': 'Issue'},
'issue_source': {'key': 'issueSource', 'type': 'object'},
'project': {'key': 'project', 'type': 'ProjectReference'},
'release_definition_reference': {'key': 'releaseDefinitionReference', 'type': 'ReleaseDefinitionShallowReference'},
'release_trigger_type': {'key': 'releaseTriggerType', 'type': 'object'}
}
def __init__(self, issue=None, issue_source=None, project=None, release_definition_reference=None, release_trigger_type=None):
super(AutoTriggerIssue, self).__init__()
self.issue = issue
self.issue_source = issue_source
self.project = project
self.release_definition_reference = release_definition_reference
self.release_trigger_type = release_trigger_type
class BuildVersion(Model):
"""
:param commit_message: Gets or sets the commit message for the artifact.
:type commit_message: str
:param definition_id: Gets or sets the definition id.
:type definition_id: str
:param definition_name: Gets or sets the definition name.
:type definition_name: str
:param id: Gets or sets the build id.
:type id: str
:param is_multi_definition_type: Gets or sets if the artifact supports multiple definitions.
:type is_multi_definition_type: bool
:param name: Gets or sets the build number.
:type name: str
:param source_branch: Gets or sets the source branch for the artifact.
:type source_branch: str
:param source_pull_request_version: Gets or sets the source pull request version for the artifact.
:type source_pull_request_version: :class:`SourcePullRequestVersion <azure.devops.v7_1.release.models.SourcePullRequestVersion>`
:param source_repository_id: Gets or sets the repository id for the artifact.
:type source_repository_id: str
:param source_repository_type: Gets or sets the repository type for the artifact.
:type source_repository_type: str
:param source_version: Gets or sets the source version for the artifact.
:type source_version: str
"""
_attribute_map = {
'commit_message': {'key': 'commitMessage', 'type': 'str'},
'definition_id': {'key': 'definitionId', 'type': 'str'},
'definition_name': {'key': 'definitionName', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_multi_definition_type': {'key': 'isMultiDefinitionType', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'source_branch': {'key': 'sourceBranch', 'type': 'str'},
'source_pull_request_version': {'key': 'sourcePullRequestVersion', 'type': 'SourcePullRequestVersion'},
'source_repository_id': {'key': 'sourceRepositoryId', 'type': 'str'},
'source_repository_type': {'key': 'sourceRepositoryType', 'type': 'str'},
'source_version': {'key': 'sourceVersion', 'type': 'str'}
}
def __init__(self, commit_message=None, definition_id=None, definition_name=None, id=None, is_multi_definition_type=None, name=None, source_branch=None, source_pull_request_version=None, source_repository_id=None, source_repository_type=None, source_version=None):
super(BuildVersion, self).__init__()
self.commit_message = commit_message
self.definition_id = definition_id
self.definition_name = definition_name
self.id = id
self.is_multi_definition_type = is_multi_definition_type
self.name = name
self.source_branch = source_branch
self.source_pull_request_version = source_pull_request_version
self.source_repository_id = source_repository_id
self.source_repository_type = source_repository_type
self.source_version = source_version
class ComplianceSettings(Model):
"""
:param check_for_credentials_and_other_secrets: Scan the release definition for secrets
:type check_for_credentials_and_other_secrets: bool
"""
_attribute_map = {
'check_for_credentials_and_other_secrets': {'key': 'checkForCredentialsAndOtherSecrets', 'type': 'bool'}
}
def __init__(self, check_for_credentials_and_other_secrets=None):
super(ComplianceSettings, self).__init__()
self.check_for_credentials_and_other_secrets = check_for_credentials_and_other_secrets
class Condition(Model):
"""
:param condition_type: Gets or sets the condition type.
:type condition_type: object
:param name: Gets or sets the name of the condition. e.g. 'ReleaseStarted'.
:type name: str
:param value: Gets or sets the value of the condition.
:type value: str
"""
_attribute_map = {
'condition_type': {'key': 'conditionType', 'type': 'object'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, condition_type=None, name=None, value=None):
super(Condition, self).__init__()
self.condition_type = condition_type
self.name = name
self.value = value
class ConfigurationVariableValue(Model):
"""
:param allow_override: Gets or sets whether the variable can be overridden at deployment time.
:type allow_override: bool
:param is_secret: Gets or sets whether the variable is a secret.
:type is_secret: bool
:param value: Gets or sets the value of the configuration variable.
:type value: str
"""
_attribute_map = {
'allow_override': {'key': 'allowOverride', 'type': 'bool'},
'is_secret': {'key': 'isSecret', 'type': 'bool'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, allow_override=None, is_secret=None, value=None):
super(ConfigurationVariableValue, self).__init__()
self.allow_override = allow_override
self.is_secret = is_secret
self.value = value
class DataSourceBindingBase(Model):
"""
Represents binding of data source for the service endpoint request.
:param callback_context_template: Pagination format supported by this data source (ContinuationToken/SkipTop).
:type callback_context_template: str
:param callback_required_template: Subsequent calls needed?
:type callback_required_template: str
:param data_source_name: Gets or sets the name of the data source.
:type data_source_name: str
:param endpoint_id: Gets or sets the endpoint Id.
:type endpoint_id: str
:param endpoint_url: Gets or sets the url of the service endpoint.
:type endpoint_url: str
:param headers: Gets or sets the authorization headers.
:type headers: list of :class:`AuthorizationHeader <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.AuthorizationHeader>`
:param initial_context_template: Defines the initial value of the query params
:type initial_context_template: str
:param parameters: Gets or sets the parameters for the data source.
:type parameters: dict
:param request_content: Gets or sets the HTTP request body.
:type request_content: str
:param request_verb: Gets or sets the HTTP request verb.
:type request_verb: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
:param result_template: Gets or sets the result template.
:type result_template: str
:param target: Gets or sets the target of the data source.
:type target: str
"""
_attribute_map = {
'callback_context_template': {'key': 'callbackContextTemplate', 'type': 'str'},
'callback_required_template': {'key': 'callbackRequiredTemplate', 'type': 'str'},
'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'endpoint_url': {'key': 'endpointUrl', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
'initial_context_template': {'key': 'initialContextTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{str}'},
'request_content': {'key': 'requestContent', 'type': 'str'},
'request_verb': {'key': 'requestVerb', 'type': 'str'},
'result_selector': {'key': 'resultSelector', 'type': 'str'},
'result_template': {'key': 'resultTemplate', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'}
}
def __init__(self, callback_context_template=None, callback_required_template=None, data_source_name=None, endpoint_id=None, endpoint_url=None, headers=None, initial_context_template=None, parameters=None, request_content=None, request_verb=None, result_selector=None, result_template=None, target=None):
super(DataSourceBindingBase, self).__init__()
self.callback_context_template = callback_context_template
self.callback_required_template = callback_required_template
self.data_source_name = data_source_name
self.endpoint_id = endpoint_id
self.endpoint_url = endpoint_url
self.headers = headers
self.initial_context_template = initial_context_template
self.parameters = parameters
self.request_content = request_content
self.request_verb = request_verb
self.result_selector = result_selector
self.result_template = result_template
self.target = target
class DefinitionEnvironmentReference(Model):
"""
:param definition_environment_id: Definition environment ID.
:type definition_environment_id: int
:param definition_environment_name: Definition environment name.
:type definition_environment_name: str
:param release_definition_id: ReleaseDefinition ID.
:type release_definition_id: int
:param release_definition_name: ReleaseDefinition name.
:type release_definition_name: str
"""
_attribute_map = {
'definition_environment_id': {'key': 'definitionEnvironmentId', 'type': 'int'},
'definition_environment_name': {'key': 'definitionEnvironmentName', 'type': 'str'},
'release_definition_id': {'key': 'releaseDefinitionId', 'type': 'int'},
'release_definition_name': {'key': 'releaseDefinitionName', 'type': 'str'}
}
def __init__(self, definition_environment_id=None, definition_environment_name=None, release_definition_id=None, release_definition_name=None):
super(DefinitionEnvironmentReference, self).__init__()
self.definition_environment_id = definition_environment_id
self.definition_environment_name = definition_environment_name
self.release_definition_id = release_definition_id
self.release_definition_name = release_definition_name
class Deployment(Model):
"""
:param _links: Gets links to access the deployment.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param attempt: Gets attempt number.
:type attempt: int
:param completed_on: Gets the date on which deployment is complete.
:type completed_on: datetime
:param conditions: Gets the list of condition associated with deployment.
:type conditions: list of :class:`Condition <azure.devops.v7_1.release.models.Condition>`
:param definition_environment_id: Gets release definition environment id.
:type definition_environment_id: int
:param deployment_status: Gets status of the deployment.
:type deployment_status: object
:param id: Gets the unique identifier for deployment.
:type id: int
:param last_modified_by: Gets the identity who last modified the deployment.
:type last_modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param last_modified_on: Gets the date on which deployment is last modified.
:type last_modified_on: datetime
:param operation_status: Gets operation status of deployment.
:type operation_status: object
:param post_deploy_approvals: Gets list of PostDeployApprovals.
:type post_deploy_approvals: list of :class:`ReleaseApproval <azure.devops.v7_1.release.models.ReleaseApproval>`
:param pre_deploy_approvals: Gets list of PreDeployApprovals.
:type pre_deploy_approvals: list of :class:`ReleaseApproval <azure.devops.v7_1.release.models.ReleaseApproval>`
:param project_reference: Gets or sets project reference.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
:param queued_on: Gets the date on which deployment is queued.
:type queued_on: datetime
:param reason: Gets reason of deployment.
:type reason: object
:param release: Gets the reference of release.
:type release: :class:`ReleaseReference <azure.devops.v7_1.release.models.ReleaseReference>`
:param release_definition: Gets releaseDefinitionReference which specifies the reference of the release definition to which the deployment is associated.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_environment: Gets releaseEnvironmentReference which specifies the reference of the release environment to which the deployment is associated.
:type release_environment: :class:`ReleaseEnvironmentShallowReference <azure.devops.v7_1.release.models.ReleaseEnvironmentShallowReference>`
:param requested_by: Gets the identity who requested.
:type requested_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param requested_for: Gets the identity for whom deployment is requested.
:type requested_for: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param scheduled_deployment_time: Gets the date on which deployment is scheduled.
:type scheduled_deployment_time: datetime
:param started_on: Gets the date on which deployment is started.
:type started_on: datetime
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'attempt': {'key': 'attempt', 'type': 'int'},
'completed_on': {'key': 'completedOn', 'type': 'iso-8601'},
'conditions': {'key': 'conditions', 'type': '[Condition]'},
'definition_environment_id': {'key': 'definitionEnvironmentId', 'type': 'int'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'object'},
'id': {'key': 'id', 'type': 'int'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'IdentityRef'},
'last_modified_on': {'key': 'lastModifiedOn', 'type': 'iso-8601'},
'operation_status': {'key': 'operationStatus', 'type': 'object'},
'post_deploy_approvals': {'key': 'postDeployApprovals', 'type': '[ReleaseApproval]'},
'pre_deploy_approvals': {'key': 'preDeployApprovals', 'type': '[ReleaseApproval]'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'},
'queued_on': {'key': 'queuedOn', 'type': 'iso-8601'},
'reason': {'key': 'reason', 'type': 'object'},
'release': {'key': 'release', 'type': 'ReleaseReference'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'release_environment': {'key': 'releaseEnvironment', 'type': 'ReleaseEnvironmentShallowReference'},
'requested_by': {'key': 'requestedBy', 'type': 'IdentityRef'},
'requested_for': {'key': 'requestedFor', 'type': 'IdentityRef'},
'scheduled_deployment_time': {'key': 'scheduledDeploymentTime', 'type': 'iso-8601'},
'started_on': {'key': 'startedOn', 'type': 'iso-8601'}
}
def __init__(self, _links=None, attempt=None, completed_on=None, conditions=None, definition_environment_id=None, deployment_status=None, id=None, last_modified_by=None, last_modified_on=None, operation_status=None, post_deploy_approvals=None, pre_deploy_approvals=None, project_reference=None, queued_on=None, reason=None, release=None, release_definition=None, release_environment=None, requested_by=None, requested_for=None, scheduled_deployment_time=None, started_on=None):
super(Deployment, self).__init__()
self._links = _links
self.attempt = attempt
self.completed_on = completed_on
self.conditions = conditions
self.definition_environment_id = definition_environment_id
self.deployment_status = deployment_status
self.id = id
self.last_modified_by = last_modified_by
self.last_modified_on = last_modified_on
self.operation_status = operation_status
self.post_deploy_approvals = post_deploy_approvals
self.pre_deploy_approvals = pre_deploy_approvals
self.project_reference = project_reference
self.queued_on = queued_on
self.reason = reason
self.release = release
self.release_definition = release_definition
self.release_environment = release_environment
self.requested_by = requested_by
self.requested_for = requested_for
self.scheduled_deployment_time = scheduled_deployment_time
self.started_on = started_on
class DeploymentAttempt(Model):
"""
:param attempt: Deployment attempt.
:type attempt: int
:param deployment_id: ID of the deployment.
:type deployment_id: int
:param error_log: Error log showing any unexpected error that occurred during execution of the deploy step.
:type error_log: str
:param has_started: Specifies whether deployment has started or not.
:type has_started: bool
:param id: ID of deployment.
:type id: int
:param issues: All the issues related to the deployment.
:type issues: list of :class:`Issue <azure.devops.v7_1.release.models.Issue>`
:param job:
:type job: :class:`ReleaseTask <azure.devops.v7_1.release.models.ReleaseTask>`
:param last_modified_by: Identity who last modified this deployment.
:type last_modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param last_modified_on: Time when this deployment was last modified.
:type last_modified_on: datetime
:param operation_status: Deployment operation status.
:type operation_status: object
:param post_deployment_gates: Post deployment gates that executed in this deployment.
:type post_deployment_gates: :class:`ReleaseGates <azure.devops.v7_1.release.models.ReleaseGates>`
:param pre_deployment_gates: Pre deployment gates that executed in this deployment.
:type pre_deployment_gates: :class:`ReleaseGates <azure.devops.v7_1.release.models.ReleaseGates>`
:param queued_on: Time when this deployment was queued.
:type queued_on: datetime
:param reason: Reason for the deployment.
:type reason: object
:param release_deploy_phases: List of release deploy phases executed in this deployment.
:type release_deploy_phases: list of :class:`ReleaseDeployPhase <azure.devops.v7_1.release.models.ReleaseDeployPhase>`
:param requested_by: Identity who requested this deployment.
:type requested_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param requested_for: Identity for whom this deployment was requested.
:type requested_for: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param run_plan_id:
:type run_plan_id: str
:param status: status of the deployment.
:type status: object
:param tasks:
:type tasks: list of :class:`ReleaseTask <azure.devops.v7_1.release.models.ReleaseTask>`
"""
_attribute_map = {
'attempt': {'key': 'attempt', 'type': 'int'},
'deployment_id': {'key': 'deploymentId', 'type': 'int'},
'error_log': {'key': 'errorLog', 'type': 'str'},
'has_started': {'key': 'hasStarted', 'type': 'bool'},
'id': {'key': 'id', 'type': 'int'},
'issues': {'key': 'issues', 'type': '[Issue]'},
'job': {'key': 'job', 'type': 'ReleaseTask'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'IdentityRef'},
'last_modified_on': {'key': 'lastModifiedOn', 'type': 'iso-8601'},
'operation_status': {'key': 'operationStatus', 'type': 'object'},
'post_deployment_gates': {'key': 'postDeploymentGates', 'type': 'ReleaseGates'},
'pre_deployment_gates': {'key': 'preDeploymentGates', 'type': 'ReleaseGates'},
'queued_on': {'key': 'queuedOn', 'type': 'iso-8601'},
'reason': {'key': 'reason', 'type': 'object'},
'release_deploy_phases': {'key': 'releaseDeployPhases', 'type': '[ReleaseDeployPhase]'},
'requested_by': {'key': 'requestedBy', 'type': 'IdentityRef'},
'requested_for': {'key': 'requestedFor', 'type': 'IdentityRef'},
'run_plan_id': {'key': 'runPlanId', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'tasks': {'key': 'tasks', 'type': '[ReleaseTask]'}
}
def __init__(self, attempt=None, deployment_id=None, error_log=None, has_started=None, id=None, issues=None, job=None, last_modified_by=None, last_modified_on=None, operation_status=None, post_deployment_gates=None, pre_deployment_gates=None, queued_on=None, reason=None, release_deploy_phases=None, requested_by=None, requested_for=None, run_plan_id=None, status=None, tasks=None):
super(DeploymentAttempt, self).__init__()
self.attempt = attempt
self.deployment_id = deployment_id
self.error_log = error_log
self.has_started = has_started
self.id = id
self.issues = issues
self.job = job
self.last_modified_by = last_modified_by
self.last_modified_on = last_modified_on
self.operation_status = operation_status
self.post_deployment_gates = post_deployment_gates
self.pre_deployment_gates = pre_deployment_gates
self.queued_on = queued_on
self.reason = reason
self.release_deploy_phases = release_deploy_phases
self.requested_by = requested_by
self.requested_for = requested_for
self.run_plan_id = run_plan_id
self.status = status
self.tasks = tasks
class DeploymentJob(Model):
"""
:param job: Parent task of all executed tasks.
:type job: :class:`ReleaseTask <azure.devops.v7_1.release.models.ReleaseTask>`
:param tasks: List of tasks executed within the job.
:type tasks: list of :class:`ReleaseTask <azure.devops.v7_1.release.models.ReleaseTask>`
"""
_attribute_map = {
'job': {'key': 'job', 'type': 'ReleaseTask'},
'tasks': {'key': 'tasks', 'type': '[ReleaseTask]'}
}
def __init__(self, job=None, tasks=None):
super(DeploymentJob, self).__init__()
self.job = job
self.tasks = tasks
class DeploymentQueryParameters(Model):
"""
:param artifact_source_id: Query deployments based on the specified artifact source ID.
:type artifact_source_id: str
:param artifact_type_id: Query deployments based on the specified artifact type ID.
:type artifact_type_id: str
:param artifact_versions: Query deployments based on the specified artifact versions.
:type artifact_versions: list of str
:param deployments_per_environment: Number of deployments to query per environment.
:type deployments_per_environment: int
:param deployment_status: Query deployments based on deployment status.
:type deployment_status: object
:param environments: Query deployments of specified environments.
:type environments: list of :class:`DefinitionEnvironmentReference <azure.devops.v7_1.release.models.DefinitionEnvironmentReference>`
:param expands: Query deployments based on the specified expand options.
:type expands: object
:param is_deleted: Specify whether deleted deployments should be returned.
:type is_deleted: bool
:param latest_deployments_only:
:type latest_deployments_only: bool
:param max_deployments_per_environment:
:type max_deployments_per_environment: int
:param max_modified_time:
:type max_modified_time: datetime
:param min_modified_time:
:type min_modified_time: datetime
:param operation_status: Query deployments based on deployment operation status.
:type operation_status: object
:param query_order:
:type query_order: object
:param query_type: Query deployments based on the specified query type.
:type query_type: object
:param source_branch: Query deployments based on the specified source branch.
:type source_branch: str
"""
_attribute_map = {
'artifact_source_id': {'key': 'artifactSourceId', 'type': 'str'},
'artifact_type_id': {'key': 'artifactTypeId', 'type': 'str'},
'artifact_versions': {'key': 'artifactVersions', 'type': '[str]'},
'deployments_per_environment': {'key': 'deploymentsPerEnvironment', 'type': 'int'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'object'},
'environments': {'key': 'environments', 'type': '[DefinitionEnvironmentReference]'},
'expands': {'key': 'expands', 'type': 'object'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'latest_deployments_only': {'key': 'latestDeploymentsOnly', 'type': 'bool'},
'max_deployments_per_environment': {'key': 'maxDeploymentsPerEnvironment', 'type': 'int'},
'max_modified_time': {'key': 'maxModifiedTime', 'type': 'iso-8601'},
'min_modified_time': {'key': 'minModifiedTime', 'type': 'iso-8601'},
'operation_status': {'key': 'operationStatus', 'type': 'object'},
'query_order': {'key': 'queryOrder', 'type': 'object'},
'query_type': {'key': 'queryType', 'type': 'object'},
'source_branch': {'key': 'sourceBranch', 'type': 'str'}
}
def __init__(self, artifact_source_id=None, artifact_type_id=None, artifact_versions=None, deployments_per_environment=None, deployment_status=None, environments=None, expands=None, is_deleted=None, latest_deployments_only=None, max_deployments_per_environment=None, max_modified_time=None, min_modified_time=None, operation_status=None, query_order=None, query_type=None, source_branch=None):
super(DeploymentQueryParameters, self).__init__()
self.artifact_source_id = artifact_source_id
self.artifact_type_id = artifact_type_id
self.artifact_versions = artifact_versions
self.deployments_per_environment = deployments_per_environment
self.deployment_status = deployment_status
self.environments = environments
self.expands = expands
self.is_deleted = is_deleted
self.latest_deployments_only = latest_deployments_only
self.max_deployments_per_environment = max_deployments_per_environment
self.max_modified_time = max_modified_time
self.min_modified_time = min_modified_time
self.operation_status = operation_status
self.query_order = query_order
self.query_type = query_type
self.source_branch = source_branch
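# Usage sketch (illustrative only): query parameters asking for the most recent
# deployment per environment, newest first. The enum-like fields take the REST
# API's string values; 'succeeded' and 'descending' are assumptions, so confirm
# them against the DeploymentStatus and ReleaseQueryOrder documentation.
#
#   query = DeploymentQueryParameters(
#       deployment_status='succeeded',
#       latest_deployments_only=True,
#       max_deployments_per_environment=1,
#       query_order='descending')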
class EmailRecipients(Model):
"""
:param email_addresses: List of email addresses.
:type email_addresses: list of str
:param tfs_ids: List of TFS ID GUIDs.
:type tfs_ids: list of str
"""
_attribute_map = {
'email_addresses': {'key': 'emailAddresses', 'type': '[str]'},
'tfs_ids': {'key': 'tfsIds', 'type': '[str]'}
}
def __init__(self, email_addresses=None, tfs_ids=None):
super(EmailRecipients, self).__init__()
self.email_addresses = email_addresses
self.tfs_ids = tfs_ids
class EnvironmentExecutionPolicy(Model):
"""
Defines the policy for environment queuing on the Release Management side queue. Environments are sent to the Environment Runner (which creates the pre-deploy and other steps) only when the policies below are satisfied.
:param concurrency_count: This policy decides how many environments can be held by the Environment Runner at a time.
:type concurrency_count: int
:param queue_depth_count: Queue depth in the EnvironmentQueue table; this table keeps environment entries until the Environment Runner is free (per its policy) to take another environment for running.
:type queue_depth_count: int
"""
_attribute_map = {
'concurrency_count': {'key': 'concurrencyCount', 'type': 'int'},
'queue_depth_count': {'key': 'queueDepthCount', 'type': 'int'}
}
def __init__(self, concurrency_count=None, queue_depth_count=None):
super(EnvironmentExecutionPolicy, self).__init__()
self.concurrency_count = concurrency_count
self.queue_depth_count = queue_depth_count
class EnvironmentOptions(Model):
"""
:param auto_link_work_items: Gets or sets whether work items are automatically linked.
:type auto_link_work_items: bool
:param badge_enabled: Gets or sets whether the badge is enabled.
:type badge_enabled: bool
:param email_notification_type:
:type email_notification_type: str
:param email_recipients:
:type email_recipients: str
:param enable_access_token:
:type enable_access_token: bool
:param publish_deployment_status: Gets or sets whether the deployment status is published.
:type publish_deployment_status: bool
:param pull_request_deployment_enabled: Gets or sets whether pull request deployment is enabled.
:type pull_request_deployment_enabled: bool
:param skip_artifacts_download:
:type skip_artifacts_download: bool
:param timeout_in_minutes:
:type timeout_in_minutes: int
"""
_attribute_map = {
'auto_link_work_items': {'key': 'autoLinkWorkItems', 'type': 'bool'},
'badge_enabled': {'key': 'badgeEnabled', 'type': 'bool'},
'email_notification_type': {'key': 'emailNotificationType', 'type': 'str'},
'email_recipients': {'key': 'emailRecipients', 'type': 'str'},
'enable_access_token': {'key': 'enableAccessToken', 'type': 'bool'},
'publish_deployment_status': {'key': 'publishDeploymentStatus', 'type': 'bool'},
'pull_request_deployment_enabled': {'key': 'pullRequestDeploymentEnabled', 'type': 'bool'},
'skip_artifacts_download': {'key': 'skipArtifactsDownload', 'type': 'bool'},
'timeout_in_minutes': {'key': 'timeoutInMinutes', 'type': 'int'}
}
def __init__(self, auto_link_work_items=None, badge_enabled=None, email_notification_type=None, email_recipients=None, enable_access_token=None, publish_deployment_status=None, pull_request_deployment_enabled=None, skip_artifacts_download=None, timeout_in_minutes=None):
super(EnvironmentOptions, self).__init__()
self.auto_link_work_items = auto_link_work_items
self.badge_enabled = badge_enabled
self.email_notification_type = email_notification_type
self.email_recipients = email_recipients
self.enable_access_token = enable_access_token
self.publish_deployment_status = publish_deployment_status
self.pull_request_deployment_enabled = pull_request_deployment_enabled
self.skip_artifacts_download = skip_artifacts_download
self.timeout_in_minutes = timeout_in_minutes
class EnvironmentRetentionPolicy(Model):
"""
:param days_to_keep: Gets or sets the number of days to keep the environment.
:type days_to_keep: int
:param releases_to_keep: Gets and sets the number of releases to keep.
:type releases_to_keep: int
:param retain_build: Gets or sets whether the build is to be retained.
:type retain_build: bool
"""
_attribute_map = {
'days_to_keep': {'key': 'daysToKeep', 'type': 'int'},
'releases_to_keep': {'key': 'releasesToKeep', 'type': 'int'},
'retain_build': {'key': 'retainBuild', 'type': 'bool'}
}
def __init__(self, days_to_keep=None, releases_to_keep=None, retain_build=None):
super(EnvironmentRetentionPolicy, self).__init__()
self.days_to_keep = days_to_keep
self.releases_to_keep = releases_to_keep
self.retain_build = retain_build
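# Usage sketch (illustrative only): retention policy that keeps the last three
# releases of an environment for 30 days and retains their associated builds.
# The values are placeholders.
#
#   retention = EnvironmentRetentionPolicy(days_to_keep=30, releases_to_keep=3, retain_build=True)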
class EnvironmentTrigger(Model):
"""
:param definition_environment_id: Definition environment ID to which this trigger is applicable.
:type definition_environment_id: int
:param release_definition_id: ReleaseDefinition ID to which this trigger is applicable.
:type release_definition_id: int
:param trigger_content: Gets or sets the trigger content.
:type trigger_content: str
:param trigger_type: Gets or sets the trigger type.
:type trigger_type: object
"""
_attribute_map = {
'definition_environment_id': {'key': 'definitionEnvironmentId', 'type': 'int'},
'release_definition_id': {'key': 'releaseDefinitionId', 'type': 'int'},
'trigger_content': {'key': 'triggerContent', 'type': 'str'},
'trigger_type': {'key': 'triggerType', 'type': 'object'}
}
def __init__(self, definition_environment_id=None, release_definition_id=None, trigger_content=None, trigger_type=None):
super(EnvironmentTrigger, self).__init__()
self.definition_environment_id = definition_environment_id
self.release_definition_id = release_definition_id
self.trigger_content = trigger_content
self.trigger_type = trigger_type
class FavoriteItem(Model):
"""
Class to represent favorite entry.
:param data: Application specific data for the entry.
:type data: str
:param id: Unique Id of the entry.
:type id: str
:param name: Display text for favorite entry.
:type name: str
:param type: Application specific favorite entry type. Empty or Null represents that Favorite item is a Folder.
:type type: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, data=None, id=None, name=None, type=None):
super(FavoriteItem, self).__init__()
self.data = data
self.id = id
self.name = name
self.type = type
class Folder(Model):
"""
:param created_by: Identity who created this folder.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_on: Time when this folder was created.
:type created_on: datetime
:param description: Description of the folder.
:type description: str
:param last_changed_by: Identity who last changed this folder.
:type last_changed_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param last_changed_date: Time when this folder was last changed.
:type last_changed_date: datetime
:param path: Path of the folder.
:type path: str
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'last_changed_by': {'key': 'lastChangedBy', 'type': 'IdentityRef'},
'last_changed_date': {'key': 'lastChangedDate', 'type': 'iso-8601'},
'path': {'key': 'path', 'type': 'str'}
}
def __init__(self, created_by=None, created_on=None, description=None, last_changed_by=None, last_changed_date=None, path=None):
super(Folder, self).__init__()
self.created_by = created_by
self.created_on = created_on
self.description = description
self.last_changed_by = last_changed_by
self.last_changed_date = last_changed_date
self.path = path
class GateUpdateMetadata(Model):
"""
:param comment: Comment.
:type comment: str
:param gates_to_ignore: Names of the gates to be ignored.
:type gates_to_ignore: list of str
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'gates_to_ignore': {'key': 'gatesToIgnore', 'type': '[str]'}
}
def __init__(self, comment=None, gates_to_ignore=None):
super(GateUpdateMetadata, self).__init__()
self.comment = comment
self.gates_to_ignore = gates_to_ignore
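# Illustrative usage sketch: the payload a caller might build to ignore specific
# gates. The gate name below is an assumption used only for demonstration.
#
#     gate_update = GateUpdateMetadata(
#         comment='Ignoring the work item gate for this hotfix deployment.',
#         gates_to_ignore=['Query Work Items'])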
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class Change(Model):
"""
Represents a change associated with a build.
:param author: The author of the change.
:type author: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param display_uri: The location of a user-friendly representation of the resource.
:type display_uri: str
:param change_type: The type of source. "TfsVersionControl", "TfsGit", etc.
:type change_type: str
:param id: Something that identifies the change. For a commit, this would be the SHA1. For a TFVC changeset, this would be the changeset id.
:type id: str
:param location: The location of the full representation of the resource.
:type location: str
:param message: A description of the change. This might be a commit message or changeset description.
:type message: str
:param pushed_by: The person or process that pushed the change.
:type pushed_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param pusher: The person or process that pushed the change.
:type pusher: str
:param timestamp: A timestamp for the change.
:type timestamp: datetime
"""
_attribute_map = {
'author': {'key': 'author', 'type': 'IdentityRef'},
'display_uri': {'key': 'displayUri', 'type': 'str'},
'change_type': {'key': 'changeType', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'pushed_by': {'key': 'pushedBy', 'type': 'IdentityRef'},
'pusher': {'key': 'pusher', 'type': 'str'},
'timestamp': {'key': 'timestamp', 'type': 'iso-8601'}
}
def __init__(self, author=None, display_uri=None, change_type=None, id=None, location=None, message=None, pushed_by=None, pusher=None, timestamp=None):
super(Change, self).__init__()
self.author = author
self.display_uri = display_uri
self.change_type = change_type
self.id = id
self.location = location
self.message = message
self.pushed_by = pushed_by
self.pusher = pusher
self.timestamp = timestamp
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class IgnoredGate(Model):
"""
:param last_modified_on: Gets the date on which the gate was last ignored.
:type last_modified_on: datetime
:param name: Name of the ignored gate.
:type name: str
"""
_attribute_map = {
'last_modified_on': {'key': 'lastModifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, last_modified_on=None, name=None):
super(IgnoredGate, self).__init__()
self.last_modified_on = last_modified_on
self.name = name
class InputDescriptor(Model):
"""
Describes an input for subscriptions.
:param dependency_input_ids: The ids of all inputs that the value of this input is dependent on.
:type dependency_input_ids: list of str
:param description: Description of what this input is used for
:type description: str
:param group_name: The group localized name to which this input belongs and can be shown as a header for the container that will include all the inputs in the group.
:type group_name: str
:param has_dynamic_value_information: If true, the value information for this input is dynamic and should be fetched when the value of dependency inputs change.
:type has_dynamic_value_information: bool
:param id: Identifier for the subscription input
:type id: str
:param input_mode: Mode in which the value of this input should be entered
:type input_mode: object
:param is_confidential: Gets whether this input is confidential, such as for a password or application key
:type is_confidential: bool
:param name: Localized name which can be shown as a label for the subscription input
:type name: str
:param properties: Custom properties for the input which can be used by the service provider
:type properties: dict
:param type: Underlying data type for the input value. When this value is specified, InputMode, Validation and Values are optional.
:type type: str
:param use_in_default_description: Gets whether this input is included in the default generated action description.
:type use_in_default_description: bool
:param validation: Information to use to validate this input's value
:type validation: :class:`InputValidation <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValidation>`
:param value_hint: A hint for input value. It can be used in the UI as the input placeholder.
:type value_hint: str
:param values: Information about possible values for this input
:type values: :class:`InputValues <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValues>`
"""
_attribute_map = {
'dependency_input_ids': {'key': 'dependencyInputIds', 'type': '[str]'},
'description': {'key': 'description', 'type': 'str'},
'group_name': {'key': 'groupName', 'type': 'str'},
'has_dynamic_value_information': {'key': 'hasDynamicValueInformation', 'type': 'bool'},
'id': {'key': 'id', 'type': 'str'},
'input_mode': {'key': 'inputMode', 'type': 'object'},
'is_confidential': {'key': 'isConfidential', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'use_in_default_description': {'key': 'useInDefaultDescription', 'type': 'bool'},
'validation': {'key': 'validation', 'type': 'InputValidation'},
'value_hint': {'key': 'valueHint', 'type': 'str'},
'values': {'key': 'values', 'type': 'InputValues'}
}
def __init__(self, dependency_input_ids=None, description=None, group_name=None, has_dynamic_value_information=None, id=None, input_mode=None, is_confidential=None, name=None, properties=None, type=None, use_in_default_description=None, validation=None, value_hint=None, values=None):
super(InputDescriptor, self).__init__()
self.dependency_input_ids = dependency_input_ids
self.description = description
self.group_name = group_name
self.has_dynamic_value_information = has_dynamic_value_information
self.id = id
self.input_mode = input_mode
self.is_confidential = is_confidential
self.name = name
self.properties = properties
self.type = type
self.use_in_default_description = use_in_default_description
self.validation = validation
self.value_hint = value_hint
self.values = values
class InputValidation(Model):
"""
Describes what values are valid for a subscription input
:param data_type: Gets or sets the data type to validate.
:type data_type: object
:param is_required: Gets or sets if this is a required field.
:type is_required: bool
:param max_length: Gets or sets the maximum length of this descriptor.
:type max_length: int
:param max_value: Gets or sets the maximum value for this descriptor.
:type max_value: decimal
:param min_length: Gets or sets the minimum length of this descriptor.
:type min_length: int
:param min_value: Gets or sets the minimum value for this descriptor.
:type min_value: decimal
:param pattern: Gets or sets the pattern to validate.
:type pattern: str
:param pattern_mismatch_error_message: Gets or sets the error on pattern mismatch.
:type pattern_mismatch_error_message: str
"""
_attribute_map = {
'data_type': {'key': 'dataType', 'type': 'object'},
'is_required': {'key': 'isRequired', 'type': 'bool'},
'max_length': {'key': 'maxLength', 'type': 'int'},
'max_value': {'key': 'maxValue', 'type': 'decimal'},
'min_length': {'key': 'minLength', 'type': 'int'},
'min_value': {'key': 'minValue', 'type': 'decimal'},
'pattern': {'key': 'pattern', 'type': 'str'},
'pattern_mismatch_error_message': {'key': 'patternMismatchErrorMessage', 'type': 'str'}
}
def __init__(self, data_type=None, is_required=None, max_length=None, max_value=None, min_length=None, min_value=None, pattern=None, pattern_mismatch_error_message=None):
super(InputValidation, self).__init__()
self.data_type = data_type
self.is_required = is_required
self.max_length = max_length
self.max_value = max_value
self.min_length = min_length
self.min_value = min_value
self.pattern = pattern
self.pattern_mismatch_error_message = pattern_mismatch_error_message
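# Illustrative usage sketch (assumed values): validation for a required string input
# that must match a simple e-mail pattern and stay under 256 characters.
#
#     validation = InputValidation(
#         is_required=True,
#         max_length=256,
#         pattern=r'^[^@\s]+@[^@\s]+$',
#         pattern_mismatch_error_message='Enter a valid e-mail address.')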
class InputValue(Model):
"""
Information about a single value for an input
:param data: Any other data about this input
:type data: dict
:param display_value: The text to show for the display of this value
:type display_value: str
:param value: The value to store for this input
:type value: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': '{object}'},
'display_value': {'key': 'displayValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, data=None, display_value=None, value=None):
super(InputValue, self).__init__()
self.data = data
self.display_value = display_value
self.value = value
class InputValues(Model):
"""
Information about the possible/allowed values for a given subscription input
:param default_value: The default value to use for this input
:type default_value: str
:param error: Errors encountered while computing dynamic values.
:type error: :class:`InputValuesError <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValuesError>`
:param input_id: The id of the input
:type input_id: str
:param is_disabled: Should this input be disabled
:type is_disabled: bool
:param is_limited_to_possible_values: Should the value be restricted to one of the values in the PossibleValues (True) or are the values in PossibleValues just a suggestion (False)
:type is_limited_to_possible_values: bool
:param is_read_only: Should this input be made read-only
:type is_read_only: bool
:param possible_values: Possible values that this input can take
:type possible_values: list of :class:`InputValue <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValue>`
"""
_attribute_map = {
'default_value': {'key': 'defaultValue', 'type': 'str'},
'error': {'key': 'error', 'type': 'InputValuesError'},
'input_id': {'key': 'inputId', 'type': 'str'},
'is_disabled': {'key': 'isDisabled', 'type': 'bool'},
'is_limited_to_possible_values': {'key': 'isLimitedToPossibleValues', 'type': 'bool'},
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'possible_values': {'key': 'possibleValues', 'type': '[InputValue]'}
}
def __init__(self, default_value=None, error=None, input_id=None, is_disabled=None, is_limited_to_possible_values=None, is_read_only=None, possible_values=None):
super(InputValues, self).__init__()
self.default_value = default_value
self.error = error
self.input_id = input_id
self.is_disabled = is_disabled
self.is_limited_to_possible_values = is_limited_to_possible_values
self.is_read_only = is_read_only
self.possible_values = possible_values
class InputValuesError(Model):
"""
Error information related to a subscription input value.
:param message: The error message.
:type message: str
"""
_attribute_map = {
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, message=None):
super(InputValuesError, self).__init__()
self.message = message
class InputValuesQuery(Model):
"""
:param current_values:
:type current_values: dict
:param input_values: The input values to return on input, and the result from the consumer on output.
:type input_values: list of :class:`InputValues <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.InputValues>`
:param resource: Subscription containing information about the publisher/consumer and the current input values
:type resource: object
"""
_attribute_map = {
'current_values': {'key': 'currentValues', 'type': '{str}'},
'input_values': {'key': 'inputValues', 'type': '[InputValues]'},
'resource': {'key': 'resource', 'type': 'object'}
}
def __init__(self, current_values=None, input_values=None, resource=None):
super(InputValuesQuery, self).__init__()
self.current_values = current_values
self.input_values = input_values
self.resource = resource
class Issue(Model):
"""
:param data: Issue data.
:type data: dict
:param issue_type: Issue type, for example error, warning or info.
:type issue_type: str
:param message: Issue message.
:type message: str
"""
_attribute_map = {
'data': {'key': 'data', 'type': '{str}'},
'issue_type': {'key': 'issueType', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, data=None, issue_type=None, message=None):
super(Issue, self).__init__()
self.data = data
self.issue_type = issue_type
self.message = message
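# Illustrative usage sketch: an error-level issue as it might be attached to a
# deployment task. The values are assumptions for demonstration only.
#
#     issue = Issue(
#         issue_type='error',
#         message='Deployment task failed: connection to the target machine timed out.',
#         data={'type': 'TaskFailure'})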
class MailMessage(Model):
"""
:param body: Body of mail.
:type body: str
:param cc: Mail CC recipients.
:type cc: :class:`EmailRecipients <azure.devops.v7_1.release.models.EmailRecipients>`
:param in_reply_to: Reply to.
:type in_reply_to: str
:param message_id: Message ID of the mail.
:type message_id: str
:param reply_by: Date by which the mail should be replied to.
:type reply_by: datetime
:param reply_to: Reply to Email recipients.
:type reply_to: :class:`EmailRecipients <azure.devops.v7_1.release.models.EmailRecipients>`
:param sections: List of mail section types.
:type sections: list of MailSectionType
:param sender_type: Mail sender type.
:type sender_type: object
:param subject: Subject of the mail.
:type subject: str
:param to: Mail To recipients.
:type to: :class:`EmailRecipients <azure.devops.v7_1.release.models.EmailRecipients>`
"""
_attribute_map = {
'body': {'key': 'body', 'type': 'str'},
'cc': {'key': 'cc', 'type': 'EmailRecipients'},
'in_reply_to': {'key': 'inReplyTo', 'type': 'str'},
'message_id': {'key': 'messageId', 'type': 'str'},
'reply_by': {'key': 'replyBy', 'type': 'iso-8601'},
'reply_to': {'key': 'replyTo', 'type': 'EmailRecipients'},
'sections': {'key': 'sections', 'type': '[object]'},
'sender_type': {'key': 'senderType', 'type': 'object'},
'subject': {'key': 'subject', 'type': 'str'},
'to': {'key': 'to', 'type': 'EmailRecipients'}
}
def __init__(self, body=None, cc=None, in_reply_to=None, message_id=None, reply_by=None, reply_to=None, sections=None, sender_type=None, subject=None, to=None):
super(MailMessage, self).__init__()
self.body = body
self.cc = cc
self.in_reply_to = in_reply_to
self.message_id = message_id
self.reply_by = reply_by
self.reply_to = reply_to
self.sections = sections
self.sender_type = sender_type
self.subject = subject
self.to = to
class ManualIntervention(Model):
"""
:param approver: Gets or sets the identity who should approve.
:type approver: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param comments: Gets or sets comments for approval.
:type comments: str
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param id: Gets the unique identifier for manual intervention.
:type id: int
:param instructions: Gets or sets instructions for approval.
:type instructions: str
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param name: Gets or sets the name.
:type name: str
:param release: Gets releaseReference for manual intervention.
:type release: :class:`ReleaseShallowReference <azure.devops.v7_1.release.models.ReleaseShallowReference>`
:param release_definition: Gets releaseDefinitionReference for manual intervention.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_environment: Gets releaseEnvironmentReference for manual intervention.
:type release_environment: :class:`ReleaseEnvironmentShallowReference <azure.devops.v7_1.release.models.ReleaseEnvironmentShallowReference>`
:param status: Gets or sets the status of the manual intervention.
:type status: object
:param task_instance_id: Gets the task instance identifier.
:type task_instance_id: str
:param url: Gets url to access the manual intervention.
:type url: str
"""
_attribute_map = {
'approver': {'key': 'approver', 'type': 'IdentityRef'},
'comments': {'key': 'comments', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'instructions': {'key': 'instructions', 'type': 'str'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'release': {'key': 'release', 'type': 'ReleaseShallowReference'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'release_environment': {'key': 'releaseEnvironment', 'type': 'ReleaseEnvironmentShallowReference'},
'status': {'key': 'status', 'type': 'object'},
'task_instance_id': {'key': 'taskInstanceId', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, approver=None, comments=None, created_on=None, id=None, instructions=None, modified_on=None, name=None, release=None, release_definition=None, release_environment=None, status=None, task_instance_id=None, url=None):
super(ManualIntervention, self).__init__()
self.approver = approver
self.comments = comments
self.created_on = created_on
self.id = id
self.instructions = instructions
self.modified_on = modified_on
self.name = name
self.release = release
self.release_definition = release_definition
self.release_environment = release_environment
self.status = status
self.task_instance_id = task_instance_id
self.url = url
class ManualInterventionUpdateMetadata(Model):
"""
:param comment: Sets the comment for manual intervention update.
:type comment: str
:param status: Sets the status of the manual intervention.
:type status: object
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, comment=None, status=None):
super(ManualInterventionUpdateMetadata, self).__init__()
self.comment = comment
self.status = status
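# Illustrative usage sketch: the update payload a caller might build to resolve a
# pending manual intervention. The status value is an assumption; the service expects
# a ManualInterventionStatus value here.
#
#     mi_update = ManualInterventionUpdateMetadata(
#         comment='Verified the staging environment; resuming the deployment.',
#         status='approved')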
class Metric(Model):
"""
:param name: Name of the Metric.
:type name: str
:param value: Value of the Metric.
:type value: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'int'}
}
def __init__(self, name=None, value=None):
super(Metric, self).__init__()
self.name = name
self.value = value
class OrgPipelineReleaseSettings(Model):
"""
:param has_manage_pipeline_policies_permission: Defines whether user can manage pipeline settings.
:type has_manage_pipeline_policies_permission: bool
:param org_enforce_job_auth_scope: EnforceJobAuthScope setting at organisation level. If enabled, scope of access for all release pipelines in the organisation reduces to the current project.
:type org_enforce_job_auth_scope: bool
"""
_attribute_map = {
'has_manage_pipeline_policies_permission': {'key': 'hasManagePipelinePoliciesPermission', 'type': 'bool'},
'org_enforce_job_auth_scope': {'key': 'orgEnforceJobAuthScope', 'type': 'bool'}
}
def __init__(self, has_manage_pipeline_policies_permission=None, org_enforce_job_auth_scope=None):
super(OrgPipelineReleaseSettings, self).__init__()
self.has_manage_pipeline_policies_permission = has_manage_pipeline_policies_permission
self.org_enforce_job_auth_scope = org_enforce_job_auth_scope
class OrgPipelineReleaseSettingsUpdateParameters(Model):
"""
:param org_enforce_job_auth_scope: EnforceJobAuthScope setting at organisation level. If enabled, scope of access for all release pipelines in the organisation reduces to the current project.
:type org_enforce_job_auth_scope: bool
"""
_attribute_map = {
'org_enforce_job_auth_scope': {'key': 'orgEnforceJobAuthScope', 'type': 'bool'}
}
def __init__(self, org_enforce_job_auth_scope=None):
super(OrgPipelineReleaseSettingsUpdateParameters, self).__init__()
self.org_enforce_job_auth_scope = org_enforce_job_auth_scope
class PipelineProcess(Model):
"""
:param type: Pipeline process type.
:type type: object
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'object'}
}
def __init__(self, type=None):
super(PipelineProcess, self).__init__()
self.type = type
class ProcessParameters(Model):
"""
:param data_source_bindings:
:type data_source_bindings: list of :class:`DataSourceBindingBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.DataSourceBindingBase>`
:param inputs:
:type inputs: list of :class:`TaskInputDefinitionBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskInputDefinitionBase>`
:param source_definitions:
:type source_definitions: list of :class:`TaskSourceDefinitionBase <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskSourceDefinitionBase>`
"""
_attribute_map = {
'data_source_bindings': {'key': 'dataSourceBindings', 'type': '[DataSourceBindingBase]'},
'inputs': {'key': 'inputs', 'type': '[TaskInputDefinitionBase]'},
'source_definitions': {'key': 'sourceDefinitions', 'type': '[TaskSourceDefinitionBase]'}
}
def __init__(self, data_source_bindings=None, inputs=None, source_definitions=None):
super(ProcessParameters, self).__init__()
self.data_source_bindings = data_source_bindings
self.inputs = inputs
self.source_definitions = source_definitions
class ProjectPipelineReleaseSettings(Model):
"""
:param enforce_job_auth_scope: EnforceJobAuthScope setting at project level. If enabled, scope of access for all release pipelines reduces to the current project.
:type enforce_job_auth_scope: bool
:param has_manage_settings_permission: Defines whether user can manage pipeline settings.
:type has_manage_settings_permission: bool
:param org_enforce_job_auth_scope: EnforceJobAuthScope setting at organisation level. If enabled, scope of access for all release pipelines in the organisation reduces to the current project.
:type org_enforce_job_auth_scope: bool
:param public_project: Defines whether project is public.
:type public_project: bool
"""
_attribute_map = {
'enforce_job_auth_scope': {'key': 'enforceJobAuthScope', 'type': 'bool'},
'has_manage_settings_permission': {'key': 'hasManageSettingsPermission', 'type': 'bool'},
'org_enforce_job_auth_scope': {'key': 'orgEnforceJobAuthScope', 'type': 'bool'},
'public_project': {'key': 'publicProject', 'type': 'bool'}
}
def __init__(self, enforce_job_auth_scope=None, has_manage_settings_permission=None, org_enforce_job_auth_scope=None, public_project=None):
super(ProjectPipelineReleaseSettings, self).__init__()
self.enforce_job_auth_scope = enforce_job_auth_scope
self.has_manage_settings_permission = has_manage_settings_permission
self.org_enforce_job_auth_scope = org_enforce_job_auth_scope
self.public_project = public_project
class ProjectPipelineReleaseSettingsUpdateParameters(Model):
"""
:param enforce_job_auth_scope: EnforceJobAuthScope setting at project level. If enabled, scope of access for all release pipelines reduces to the current project.
:type enforce_job_auth_scope: bool
"""
_attribute_map = {
'enforce_job_auth_scope': {'key': 'enforceJobAuthScope', 'type': 'bool'}
}
def __init__(self, enforce_job_auth_scope=None):
super(ProjectPipelineReleaseSettingsUpdateParameters, self).__init__()
self.enforce_job_auth_scope = enforce_job_auth_scope
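# Illustrative usage sketch: parameters that restrict the access scope of all release
# pipelines in the project to the current project. Submitting the object is done
# through the release client, which is outside this sketch.
#
#     settings_update = ProjectPipelineReleaseSettingsUpdateParameters(
#         enforce_job_auth_scope=True)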
class ProjectReference(Model):
"""
:param id: Gets the unique identifier of this field.
:type id: str
:param name: Gets name of project.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(ProjectReference, self).__init__()
self.id = id
self.name = name
class QueuedReleaseData(Model):
"""
:param project_id: Project ID of the release.
:type project_id: str
:param queue_position: Release queue position.
:type queue_position: int
:param release_id: Queued release ID.
:type release_id: int
"""
_attribute_map = {
'project_id': {'key': 'projectId', 'type': 'str'},
'queue_position': {'key': 'queuePosition', 'type': 'int'},
'release_id': {'key': 'releaseId', 'type': 'int'}
}
def __init__(self, project_id=None, queue_position=None, release_id=None):
super(QueuedReleaseData, self).__init__()
self.project_id = project_id
self.queue_position = queue_position
self.release_id = release_id
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class Release(Model):
"""
:param _links: Gets links to access the release.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param artifacts: Gets or sets the list of artifacts.
:type artifacts: list of :class:`Artifact <azure.devops.v7_1.release.models.Artifact>`
:param comment: Gets or sets comment.
:type comment: str
:param created_by: Gets or sets the identity who created.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_for: Gets or sets the identity for whom release was created.
:type created_for: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param definition_snapshot_revision: Gets revision number of definition snapshot.
:type definition_snapshot_revision: int
:param description: Gets or sets description of release.
:type description: str
:param environments: Gets list of environments.
:type environments: list of :class:`ReleaseEnvironment <azure.devops.v7_1.release.models.ReleaseEnvironment>`
:param id: Gets the unique identifier of this field.
:type id: int
:param keep_forever: Whether to exclude the release from retention policies.
:type keep_forever: bool
:param logs_container_url: Gets logs container url.
:type logs_container_url: str
:param modified_by: Gets or sets the identity who modified.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param name: Gets name.
:type name: str
:param pool_name: Gets pool name.
:type pool_name: str
:param project_reference: Gets or sets project reference.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
:param properties:
:type properties: :class:`object <azure.devops.v7_1.release.models.object>`
:param reason: Gets reason of release.
:type reason: object
:param release_definition: Gets releaseDefinitionReference which specifies the reference of the release definition to which this release is associated.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_definition_revision: Gets or sets the release definition revision.
:type release_definition_revision: int
:param release_name_format: Gets release name format.
:type release_name_format: str
:param status: Gets status.
:type status: object
:param tags: Gets or sets list of tags.
:type tags: list of str
:param triggering_artifact_alias:
:type triggering_artifact_alias: str
:param url:
:type url: str
:param variable_groups: Gets the list of variable groups.
:type variable_groups: list of :class:`VariableGroup <azure.devops.v7_1.release.models.VariableGroup>`
:param variables: Gets or sets the dictionary of variables.
:type variables: dict
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'artifacts': {'key': 'artifacts', 'type': '[Artifact]'},
'comment': {'key': 'comment', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_for': {'key': 'createdFor', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'definition_snapshot_revision': {'key': 'definitionSnapshotRevision', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'environments': {'key': 'environments', 'type': '[ReleaseEnvironment]'},
'id': {'key': 'id', 'type': 'int'},
'keep_forever': {'key': 'keepForever', 'type': 'bool'},
'logs_container_url': {'key': 'logsContainerUrl', 'type': 'str'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'pool_name': {'key': 'poolName', 'type': 'str'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'},
'properties': {'key': 'properties', 'type': 'object'},
'reason': {'key': 'reason', 'type': 'object'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'release_definition_revision': {'key': 'releaseDefinitionRevision', 'type': 'int'},
'release_name_format': {'key': 'releaseNameFormat', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'},
'tags': {'key': 'tags', 'type': '[str]'},
'triggering_artifact_alias': {'key': 'triggeringArtifactAlias', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'variable_groups': {'key': 'variableGroups', 'type': '[VariableGroup]'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, _links=None, artifacts=None, comment=None, created_by=None, created_for=None, created_on=None, definition_snapshot_revision=None, description=None, environments=None, id=None, keep_forever=None, logs_container_url=None, modified_by=None, modified_on=None, name=None, pool_name=None, project_reference=None, properties=None, reason=None, release_definition=None, release_definition_revision=None, release_name_format=None, status=None, tags=None, triggering_artifact_alias=None, url=None, variable_groups=None, variables=None):
super(Release, self).__init__()
self._links = _links
self.artifacts = artifacts
self.comment = comment
self.created_by = created_by
self.created_for = created_for
self.created_on = created_on
self.definition_snapshot_revision = definition_snapshot_revision
self.description = description
self.environments = environments
self.id = id
self.keep_forever = keep_forever
self.logs_container_url = logs_container_url
self.modified_by = modified_by
self.modified_on = modified_on
self.name = name
self.pool_name = pool_name
self.project_reference = project_reference
self.properties = properties
self.reason = reason
self.release_definition = release_definition
self.release_definition_revision = release_definition_revision
self.release_name_format = release_name_format
self.status = status
self.tags = tags
self.triggering_artifact_alias = triggering_artifact_alias
self.url = url
self.variable_groups = variable_groups
self.variables = variables
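# Illustrative usage sketch: typical read-only access to a Release instance that has
# been deserialized from a service response. 'release' is an assumed variable name.
#
#     print(release.name, release.status, release.created_on)
#     for tag in release.tags or []:
#         print(tag)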
class ReleaseApproval(Model):
"""
:param approval_type: Gets or sets the type of approval.
:type approval_type: object
:param approved_by: Gets the identity who approved.
:type approved_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param approver: Gets or sets the identity who should approve.
:type approver: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param attempt: Gets or sets the attempt, which specifies the deployment attempt to which this approval belongs.
:type attempt: int
:param comments: Gets or sets comments for approval.
:type comments: str
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param history: Gets history which specifies all approvals associated with this approval.
:type history: list of :class:`ReleaseApprovalHistory <azure.devops.v7_1.release.models.ReleaseApprovalHistory>`
:param id: Gets the unique identifier of this field.
:type id: int
:param is_automated: Gets or sets whether the approval is automated or not.
:type is_automated: bool
:param is_notification_on:
:type is_notification_on: bool
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param rank: Gets or sets rank which specifies the order of the approval. e.g. Same rank denotes parallel approval.
:type rank: int
:param release: Gets releaseReference which specifies the reference of the release to which this approval is associated.
:type release: :class:`ReleaseShallowReference <azure.devops.v7_1.release.models.ReleaseShallowReference>`
:param release_definition: Gets releaseDefinitionReference which specifies the reference of the release definition to which this approval is associated.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_environment: Gets releaseEnvironmentReference which specifies the reference of the release environment to which this approval is associated.
:type release_environment: :class:`ReleaseEnvironmentShallowReference <azure.devops.v7_1.release.models.ReleaseEnvironmentShallowReference>`
:param revision: Gets the revision number.
:type revision: int
:param status: Gets or sets the status of the approval.
:type status: object
:param trial_number:
:type trial_number: int
:param url: Gets url to access the approval.
:type url: str
"""
_attribute_map = {
'approval_type': {'key': 'approvalType', 'type': 'object'},
'approved_by': {'key': 'approvedBy', 'type': 'IdentityRef'},
'approver': {'key': 'approver', 'type': 'IdentityRef'},
'attempt': {'key': 'attempt', 'type': 'int'},
'comments': {'key': 'comments', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'history': {'key': 'history', 'type': '[ReleaseApprovalHistory]'},
'id': {'key': 'id', 'type': 'int'},
'is_automated': {'key': 'isAutomated', 'type': 'bool'},
'is_notification_on': {'key': 'isNotificationOn', 'type': 'bool'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'rank': {'key': 'rank', 'type': 'int'},
'release': {'key': 'release', 'type': 'ReleaseShallowReference'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'release_environment': {'key': 'releaseEnvironment', 'type': 'ReleaseEnvironmentShallowReference'},
'revision': {'key': 'revision', 'type': 'int'},
'status': {'key': 'status', 'type': 'object'},
'trial_number': {'key': 'trialNumber', 'type': 'int'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, approval_type=None, approved_by=None, approver=None, attempt=None, comments=None, created_on=None, history=None, id=None, is_automated=None, is_notification_on=None, modified_on=None, rank=None, release=None, release_definition=None, release_environment=None, revision=None, status=None, trial_number=None, url=None):
super(ReleaseApproval, self).__init__()
self.approval_type = approval_type
self.approved_by = approved_by
self.approver = approver
self.attempt = attempt
self.comments = comments
self.created_on = created_on
self.history = history
self.id = id
self.is_automated = is_automated
self.is_notification_on = is_notification_on
self.modified_on = modified_on
self.rank = rank
self.release = release
self.release_definition = release_definition
self.release_environment = release_environment
self.revision = revision
self.status = status
self.trial_number = trial_number
self.url = url
class ReleaseApprovalHistory(Model):
"""
:param approver: Identity of the approver.
:type approver: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param comments: Approval history comments.
:type comments: str
:param created_on: Time when this approval was created.
:type created_on: datetime
:param changed_by: Identity who changed the approval.
:type changed_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param modified_on: Time when this approval was modified.
:type modified_on: datetime
:param revision: Approval history revision.
:type revision: int
"""
_attribute_map = {
'approver': {'key': 'approver', 'type': 'IdentityRef'},
'comments': {'key': 'comments', 'type': 'str'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'changed_by': {'key': 'changedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, approver=None, comments=None, created_on=None, changed_by=None, modified_on=None, revision=None):
super(ReleaseApprovalHistory, self).__init__()
self.approver = approver
self.comments = comments
self.created_on = created_on
self.changed_by = changed_by
self.modified_on = modified_on
self.revision = revision
class ReleaseCondition(Condition):
"""
:param condition_type: Gets or sets the condition type.
:type condition_type: object
:param name: Gets or sets the name of the condition. e.g. 'ReleaseStarted'.
:type name: str
:param value: Gets or sets the value of the condition.
:type value: str
:param result: The release condition result.
:type result: bool
"""
_attribute_map = {
'condition_type': {'key': 'conditionType', 'type': 'object'},
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
'result': {'key': 'result', 'type': 'bool'}
}
def __init__(self, condition_type=None, name=None, value=None, result=None):
super(ReleaseCondition, self).__init__(condition_type=condition_type, name=name, value=value)
self.result = result
class ReleaseDefinitionApprovals(Model):
"""
:param approval_options: Gets or sets the approval options.
:type approval_options: :class:`ApprovalOptions <azure.devops.v7_1.release.models.ApprovalOptions>`
:param approvals: Gets or sets the approvals.
:type approvals: list of :class:`ReleaseDefinitionApprovalStep <azure.devops.v7_1.release.models.ReleaseDefinitionApprovalStep>`
"""
_attribute_map = {
'approval_options': {'key': 'approvalOptions', 'type': 'ApprovalOptions'},
'approvals': {'key': 'approvals', 'type': '[ReleaseDefinitionApprovalStep]'}
}
def __init__(self, approval_options=None, approvals=None):
super(ReleaseDefinitionApprovals, self).__init__()
self.approval_options = approval_options
self.approvals = approvals
class ReleaseDefinitionEnvironment(Model):
"""
:param badge_url: Gets or sets the BadgeUrl. The BadgeUrl is used when the badge is enabled in the release definition environment.
:type badge_url: str
:param conditions: Gets or sets the environment conditions.
:type conditions: list of :class:`Condition <azure.devops.v7_1.release.models.Condition>`
:param current_release: Gets or sets the current release reference.
:type current_release: :class:`ReleaseShallowReference <azure.devops.v7_1.release.models.ReleaseShallowReference>`
:param demands: Gets or sets the demands.
:type demands: list of :class:`object <azure.devops.v7_1.release.models.object>`
:param deploy_phases: Gets or sets the deploy phases of environment.
:type deploy_phases: list of :class:`object <azure.devops.v7_1.release.models.object>`
:param deploy_step: Gets or sets the deploy step.
:type deploy_step: :class:`ReleaseDefinitionDeployStep <azure.devops.v7_1.release.models.ReleaseDefinitionDeployStep>`
:param environment_options: Gets or sets the environment options.
:type environment_options: :class:`EnvironmentOptions <azure.devops.v7_1.release.models.EnvironmentOptions>`
:param environment_triggers: Gets or sets the triggers on environment.
:type environment_triggers: list of :class:`EnvironmentTrigger <azure.devops.v7_1.release.models.EnvironmentTrigger>`
:param execution_policy: Gets or sets the environment execution policy.
:type execution_policy: :class:`EnvironmentExecutionPolicy <azure.devops.v7_1.release.models.EnvironmentExecutionPolicy>`
:param id: Gets and sets the ID of the ReleaseDefinitionEnvironment.
:type id: int
:param name: Gets and sets the name of the ReleaseDefinitionEnvironment.
:type name: str
:param owner: Gets and sets the Owner of the ReleaseDefinitionEnvironment.
:type owner: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param post_deploy_approvals: Gets or sets the post deployment approvals.
:type post_deploy_approvals: :class:`ReleaseDefinitionApprovals <azure.devops.v7_1.release.models.ReleaseDefinitionApprovals>`
:param post_deployment_gates: Gets or sets the post deployment gates.
:type post_deployment_gates: :class:`ReleaseDefinitionGatesStep <azure.devops.v7_1.release.models.ReleaseDefinitionGatesStep>`
:param pre_deploy_approvals: Gets or sets the pre deployment approvals.
:type pre_deploy_approvals: :class:`ReleaseDefinitionApprovals <azure.devops.v7_1.release.models.ReleaseDefinitionApprovals>`
:param pre_deployment_gates: Gets or sets the pre deployment gates.
:type pre_deployment_gates: :class:`ReleaseDefinitionGatesStep <azure.devops.v7_1.release.models.ReleaseDefinitionGatesStep>`
:param process_parameters: Gets or sets the environment process parameters.
:type process_parameters: :class:`ProcessParameters <azure.devops.v7_1.release.models.ProcessParameters>`
:param properties: Gets or sets the properties on environment.
:type properties: :class:`object <azure.devops.v7_1.release.models.object>`
:param queue_id: Gets or sets the queue ID.
:type queue_id: int
:param rank: Gets and sets the rank of the ReleaseDefinitionEnvironment.
:type rank: int
:param retention_policy: Gets or sets the environment retention policy.
:type retention_policy: :class:`EnvironmentRetentionPolicy <azure.devops.v7_1.release.models.EnvironmentRetentionPolicy>`
:param run_options:
:type run_options: dict
:param schedules: Gets or sets the schedules.
:type schedules: list of :class:`ReleaseSchedule <azure.devops.v7_1.release.models.ReleaseSchedule>`
:param variable_groups: Gets or sets the variable groups.
:type variable_groups: list of int
:param variables: Gets and sets the variables.
:type variables: dict
"""
_attribute_map = {
'badge_url': {'key': 'badgeUrl', 'type': 'str'},
'conditions': {'key': 'conditions', 'type': '[Condition]'},
'current_release': {'key': 'currentRelease', 'type': 'ReleaseShallowReference'},
'demands': {'key': 'demands', 'type': '[object]'},
'deploy_phases': {'key': 'deployPhases', 'type': '[object]'},
'deploy_step': {'key': 'deployStep', 'type': 'ReleaseDefinitionDeployStep'},
'environment_options': {'key': 'environmentOptions', 'type': 'EnvironmentOptions'},
'environment_triggers': {'key': 'environmentTriggers', 'type': '[EnvironmentTrigger]'},
'execution_policy': {'key': 'executionPolicy', 'type': 'EnvironmentExecutionPolicy'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'post_deploy_approvals': {'key': 'postDeployApprovals', 'type': 'ReleaseDefinitionApprovals'},
'post_deployment_gates': {'key': 'postDeploymentGates', 'type': 'ReleaseDefinitionGatesStep'},
'pre_deploy_approvals': {'key': 'preDeployApprovals', 'type': 'ReleaseDefinitionApprovals'},
'pre_deployment_gates': {'key': 'preDeploymentGates', 'type': 'ReleaseDefinitionGatesStep'},
'process_parameters': {'key': 'processParameters', 'type': 'ProcessParameters'},
'properties': {'key': 'properties', 'type': 'object'},
'queue_id': {'key': 'queueId', 'type': 'int'},
'rank': {'key': 'rank', 'type': 'int'},
'retention_policy': {'key': 'retentionPolicy', 'type': 'EnvironmentRetentionPolicy'},
'run_options': {'key': 'runOptions', 'type': '{str}'},
'schedules': {'key': 'schedules', 'type': '[ReleaseSchedule]'},
'variable_groups': {'key': 'variableGroups', 'type': '[int]'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, badge_url=None, conditions=None, current_release=None, demands=None, deploy_phases=None, deploy_step=None, environment_options=None, environment_triggers=None, execution_policy=None, id=None, name=None, owner=None, post_deploy_approvals=None, post_deployment_gates=None, pre_deploy_approvals=None, pre_deployment_gates=None, process_parameters=None, properties=None, queue_id=None, rank=None, retention_policy=None, run_options=None, schedules=None, variable_groups=None, variables=None):
super(ReleaseDefinitionEnvironment, self).__init__()
self.badge_url = badge_url
self.conditions = conditions
self.current_release = current_release
self.demands = demands
self.deploy_phases = deploy_phases
self.deploy_step = deploy_step
self.environment_options = environment_options
self.environment_triggers = environment_triggers
self.execution_policy = execution_policy
self.id = id
self.name = name
self.owner = owner
self.post_deploy_approvals = post_deploy_approvals
self.post_deployment_gates = post_deployment_gates
self.pre_deploy_approvals = pre_deploy_approvals
self.pre_deployment_gates = pre_deployment_gates
self.process_parameters = process_parameters
self.properties = properties
self.queue_id = queue_id
self.rank = rank
self.retention_policy = retention_policy
self.run_options = run_options
self.schedules = schedules
self.variable_groups = variable_groups
self.variables = variables
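# Illustrative usage sketch: composing a release definition environment from the
# option, execution-policy and retention-policy models defined above. The values are
# assumptions; a real definition would also carry deploy phases and approvals.
#
#     environment = ReleaseDefinitionEnvironment(
#         name='Staging',
#         rank=1,
#         environment_options=EnvironmentOptions(badge_enabled=True),
#         execution_policy=EnvironmentExecutionPolicy(concurrency_count=1, queue_depth_count=0),
#         retention_policy=EnvironmentRetentionPolicy(days_to_keep=30, releases_to_keep=3, retain_build=True))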
class ReleaseDefinitionEnvironmentStep(Model):
"""
:param id: ID of the approval or deploy step.
:type id: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'}
}
def __init__(self, id=None):
super(ReleaseDefinitionEnvironmentStep, self).__init__()
self.id = id
class ReleaseDefinitionEnvironmentSummary(Model):
"""
:param id: ID of ReleaseDefinition environment summary.
:type id: int
:param last_releases: List of shallow references to releases deployed using this ReleaseDefinition.
:type last_releases: list of :class:`ReleaseShallowReference <azure.devops.v7_1.release.models.ReleaseShallowReference>`
:param name: Name of ReleaseDefinition environment summary.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'last_releases': {'key': 'lastReleases', 'type': '[ReleaseShallowReference]'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, last_releases=None, name=None):
super(ReleaseDefinitionEnvironmentSummary, self).__init__()
self.id = id
self.last_releases = last_releases
self.name = name
class ReleaseDefinitionEnvironmentTemplate(Model):
"""
:param can_delete: Indicates whether template can be deleted or not.
:type can_delete: bool
:param category: Category of the ReleaseDefinition environment template.
:type category: str
:param description: Description of the ReleaseDefinition environment template.
:type description: str
:param environment: ReleaseDefinition environment data which was used to create this template.
:type environment: :class:`ReleaseDefinitionEnvironment <azure.devops.v7_1.release.models.ReleaseDefinitionEnvironment>`
:param icon_task_id: ID of the task used to display the icon for this template.
:type icon_task_id: str
:param icon_uri: Icon uri of the template.
:type icon_uri: str
:param id: ID of the ReleaseDefinition environment template.
:type id: str
:param is_deleted: Indicates whether the template is deleted or not.
:type is_deleted: bool
:param name: Name of the ReleaseDefinition environment template.
:type name: str
"""
_attribute_map = {
'can_delete': {'key': 'canDelete', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'environment': {'key': 'environment', 'type': 'ReleaseDefinitionEnvironment'},
'icon_task_id': {'key': 'iconTaskId', 'type': 'str'},
'icon_uri': {'key': 'iconUri', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, can_delete=None, category=None, description=None, environment=None, icon_task_id=None, icon_uri=None, id=None, is_deleted=None, name=None):
super(ReleaseDefinitionEnvironmentTemplate, self).__init__()
self.can_delete = can_delete
self.category = category
self.description = description
self.environment = environment
self.icon_task_id = icon_task_id
self.icon_uri = icon_uri
self.id = id
self.is_deleted = is_deleted
self.name = name
class ReleaseDefinitionGate(Model):
"""
:param tasks: Gets or sets the gates workflow.
:type tasks: list of :class:`WorkflowTask <azure.devops.v7_1.release.models.WorkflowTask>`
"""
_attribute_map = {
'tasks': {'key': 'tasks', 'type': '[WorkflowTask]'}
}
def __init__(self, tasks=None):
super(ReleaseDefinitionGate, self).__init__()
self.tasks = tasks
class ReleaseDefinitionGatesOptions(Model):
"""
:param is_enabled: Gets or sets whether the gates are enabled or not.
:type is_enabled: bool
:param minimum_success_duration: Gets or sets the minimum duration for steady results after a successful gates evaluation.
:type minimum_success_duration: int
:param sampling_interval: Gets or sets the time between re-evaluation of gates.
:type sampling_interval: int
:param stabilization_time: Gets or sets the delay before evaluation.
:type stabilization_time: int
:param timeout: Gets or sets the timeout after which gates fail.
:type timeout: int
"""
_attribute_map = {
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'minimum_success_duration': {'key': 'minimumSuccessDuration', 'type': 'int'},
'sampling_interval': {'key': 'samplingInterval', 'type': 'int'},
'stabilization_time': {'key': 'stabilizationTime', 'type': 'int'},
'timeout': {'key': 'timeout', 'type': 'int'}
}
def __init__(self, is_enabled=None, minimum_success_duration=None, sampling_interval=None, stabilization_time=None, timeout=None):
super(ReleaseDefinitionGatesOptions, self).__init__()
self.is_enabled = is_enabled
self.minimum_success_duration = minimum_success_duration
self.sampling_interval = sampling_interval
self.stabilization_time = stabilization_time
self.timeout = timeout
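# Illustrative usage sketch (assumed values): gate evaluation options that enable the
# gates, stabilize for 5 units, re-sample every 5 units and time out after 1440 units.
# The integer durations are interpreted by the service.
#
#     gates_options = ReleaseDefinitionGatesOptions(
#         is_enabled=True,
#         stabilization_time=5,
#         sampling_interval=5,
#         minimum_success_duration=0,
#         timeout=1440)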
class ReleaseDefinitionGatesStep(Model):
"""
:param gates: Gets or sets the gates.
:type gates: list of :class:`ReleaseDefinitionGate <azure.devops.v7_1.release.models.ReleaseDefinitionGate>`
:param gates_options: Gets or sets the gate options.
:type gates_options: :class:`ReleaseDefinitionGatesOptions <azure.devops.v7_1.release.models.ReleaseDefinitionGatesOptions>`
:param id: ID of the ReleaseDefinitionGateStep.
:type id: int
"""
_attribute_map = {
'gates': {'key': 'gates', 'type': '[ReleaseDefinitionGate]'},
'gates_options': {'key': 'gatesOptions', 'type': 'ReleaseDefinitionGatesOptions'},
'id': {'key': 'id', 'type': 'int'}
}
def __init__(self, gates=None, gates_options=None, id=None):
super(ReleaseDefinitionGatesStep, self).__init__()
self.gates = gates
self.gates_options = gates_options
self.id = id
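# Illustrative sketch (not part of the generated API surface): assembling a gates step
# for a release definition environment out of the three classes above. The task_id is a
# placeholder GUID, 'ServerGate' is an assumed definition type, and the duration fields
# are integers that the service is assumed to interpret as minutes.
def _example_release_definition_gates_step():
    gate_task = WorkflowTask(                               # WorkflowTask is defined later in this module
        name='Query Work Items',                            # hypothetical gate name
        task_id='00000000-0000-0000-0000-000000000000',     # placeholder GUID of an installed gate task
        definition_type='ServerGate',
        version='0.*',
        enabled=True,
        inputs={})                                          # gate-specific inputs would go here
    options = ReleaseDefinitionGatesOptions(
        is_enabled=True,
        minimum_success_duration=0,
        sampling_interval=5,
        stabilization_time=0,
        timeout=1440)
    return ReleaseDefinitionGatesStep(
        gates=[ReleaseDefinitionGate(tasks=[gate_task])],
        gates_options=options)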
class ReleaseDefinitionRevision(Model):
"""
:param api_version: Gets api-version for revision object.
:type api_version: str
:param comment: Gets comments for revision.
:type comment: str
:param definition_id: Get id of the definition.
:type definition_id: int
:param definition_url: Gets definition URL.
:type definition_url: str
:param changed_by: Gets the identity who made the change.
:type changed_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param changed_date: Gets date on which ReleaseDefinition changed.
:type changed_date: datetime
:param change_type: Gets type of change.
:type change_type: object
:param revision: Get revision number of the definition.
:type revision: int
"""
_attribute_map = {
'api_version': {'key': 'apiVersion', 'type': 'str'},
'comment': {'key': 'comment', 'type': 'str'},
'definition_id': {'key': 'definitionId', 'type': 'int'},
'definition_url': {'key': 'definitionUrl', 'type': 'str'},
'changed_by': {'key': 'changedBy', 'type': 'IdentityRef'},
'changed_date': {'key': 'changedDate', 'type': 'iso-8601'},
'change_type': {'key': 'changeType', 'type': 'object'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, api_version=None, comment=None, definition_id=None, definition_url=None, changed_by=None, changed_date=None, change_type=None, revision=None):
super(ReleaseDefinitionRevision, self).__init__()
self.api_version = api_version
self.comment = comment
self.definition_id = definition_id
self.definition_url = definition_url
self.changed_by = changed_by
self.changed_date = changed_date
self.change_type = change_type
self.revision = revision
class ReleaseDefinitionShallowReference(Model):
"""
:param _links: Gets the links to related resources, APIs, and views for the release definition.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param id: Gets the unique identifier of release definition.
:type id: int
:param name: Gets or sets the name of the release definition.
:type name: str
:param path: Gets or sets the path of the release definition.
:type path: str
:param project_reference: Gets or sets project reference.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
:param url: Gets the REST API url to access the release definition.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None, path=None, project_reference=None, url=None):
super(ReleaseDefinitionShallowReference, self).__init__()
self._links = _links
self.id = id
self.name = name
self.path = path
self.project_reference = project_reference
self.url = url
class ReleaseDefinitionSummary(Model):
"""
:param environments: List of Release Definition environment summary.
:type environments: list of :class:`ReleaseDefinitionEnvironmentSummary <azure.devops.v7_1.release.models.ReleaseDefinitionEnvironmentSummary>`
:param release_definition: Release Definition reference.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param releases: List of releases deployed using this Release Definition.
:type releases: list of :class:`Release <azure.devops.v7_1.release.models.Release>`
"""
_attribute_map = {
'environments': {'key': 'environments', 'type': '[ReleaseDefinitionEnvironmentSummary]'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'releases': {'key': 'releases', 'type': '[Release]'}
}
def __init__(self, environments=None, release_definition=None, releases=None):
super(ReleaseDefinitionSummary, self).__init__()
self.environments = environments
self.release_definition = release_definition
self.releases = releases
class ReleaseDefinitionUndeleteParameter(Model):
"""
:param comment: Gets or sets comment.
:type comment: str
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'}
}
def __init__(self, comment=None):
super(ReleaseDefinitionUndeleteParameter, self).__init__()
self.comment = comment
class ReleaseDeployPhase(Model):
"""
:param deployment_jobs: Deployment jobs of the phase.
:type deployment_jobs: list of :class:`DeploymentJob <azure.devops.v7_1.release.models.DeploymentJob>`
:param error_log: Phase execution error logs.
:type error_log: str
:param id: ID of the phase.
:type id: int
:param manual_interventions: List of manual intervention tasks execution information in phase.
:type manual_interventions: list of :class:`ManualIntervention <azure.devops.v7_1.release.models.ManualIntervention>`
:param name: Name of the phase.
:type name: str
:param phase_id: ID of the phase.
:type phase_id: str
:param phase_type: Type of the phase.
:type phase_type: object
:param rank: Rank of the phase.
:type rank: int
:param run_plan_id: Run Plan ID of the phase.
:type run_plan_id: str
:param started_on: Phase start time.
:type started_on: datetime
:param status: Status of the phase.
:type status: object
"""
_attribute_map = {
'deployment_jobs': {'key': 'deploymentJobs', 'type': '[DeploymentJob]'},
'error_log': {'key': 'errorLog', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'manual_interventions': {'key': 'manualInterventions', 'type': '[ManualIntervention]'},
'name': {'key': 'name', 'type': 'str'},
'phase_id': {'key': 'phaseId', 'type': 'str'},
'phase_type': {'key': 'phaseType', 'type': 'object'},
'rank': {'key': 'rank', 'type': 'int'},
'run_plan_id': {'key': 'runPlanId', 'type': 'str'},
'started_on': {'key': 'startedOn', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, deployment_jobs=None, error_log=None, id=None, manual_interventions=None, name=None, phase_id=None, phase_type=None, rank=None, run_plan_id=None, started_on=None, status=None):
super(ReleaseDeployPhase, self).__init__()
self.deployment_jobs = deployment_jobs
self.error_log = error_log
self.id = id
self.manual_interventions = manual_interventions
self.name = name
self.phase_id = phase_id
self.phase_type = phase_type
self.rank = rank
self.run_plan_id = run_plan_id
self.started_on = started_on
self.status = status
class ReleaseEnvironment(Model):
"""
:param conditions: Gets list of conditions.
:type conditions: list of :class:`ReleaseCondition <azure.devops.v7_1.release.models.ReleaseCondition>`
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param definition_environment_id: Gets definition environment id.
:type definition_environment_id: int
:param demands: Gets demands.
:type demands: list of :class:`object <azure.devops.v7_1.release.models.object>`
:param deploy_phases_snapshot: Gets list of deploy phases snapshot.
:type deploy_phases_snapshot: list of :class:`object <azure.devops.v7_1.release.models.object>`
:param deploy_steps: Gets deploy steps.
:type deploy_steps: list of :class:`DeploymentAttempt <azure.devops.v7_1.release.models.DeploymentAttempt>`
:param environment_options: Gets environment options.
:type environment_options: :class:`EnvironmentOptions <azure.devops.v7_1.release.models.EnvironmentOptions>`
:param id: Gets the unique identifier of this field.
:type id: int
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param name: Gets name.
:type name: str
:param next_scheduled_utc_time: Gets next scheduled UTC time.
:type next_scheduled_utc_time: datetime
:param owner: Gets the identity who is owner for release environment.
:type owner: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param post_approvals_snapshot: Gets list of post deploy approvals snapshot.
:type post_approvals_snapshot: :class:`ReleaseDefinitionApprovals <azure.devops.v7_1.release.models.ReleaseDefinitionApprovals>`
:param post_deploy_approvals: Gets list of post deploy approvals.
:type post_deploy_approvals: list of :class:`ReleaseApproval <azure.devops.v7_1.release.models.ReleaseApproval>`
:param post_deployment_gates_snapshot: Post deployment gates snapshot data.
:type post_deployment_gates_snapshot: :class:`ReleaseDefinitionGatesStep <azure.devops.v7_1.release.models.ReleaseDefinitionGatesStep>`
:param pre_approvals_snapshot: Gets list of pre deploy approvals snapshot.
:type pre_approvals_snapshot: :class:`ReleaseDefinitionApprovals <azure.devops.v7_1.release.models.ReleaseDefinitionApprovals>`
:param pre_deploy_approvals: Gets list of pre deploy approvals.
:type pre_deploy_approvals: list of :class:`ReleaseApproval <azure.devops.v7_1.release.models.ReleaseApproval>`
:param pre_deployment_gates_snapshot: Pre deployment gates snapshot data.
:type pre_deployment_gates_snapshot: :class:`ReleaseDefinitionGatesStep <azure.devops.v7_1.release.models.ReleaseDefinitionGatesStep>`
:param process_parameters: Gets process parameters.
:type process_parameters: :class:`ProcessParameters <azure.devops.v7_1.release.models.ProcessParameters>`
:param queue_id: Gets queue id.
:type queue_id: int
:param rank: Gets rank.
:type rank: int
:param release: Gets release reference which specifies the reference of the release to which this release environment is associated.
:type release: :class:`ReleaseShallowReference <azure.devops.v7_1.release.models.ReleaseShallowReference>`
:param release_created_by: Gets the identity who created release.
:type release_created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param release_definition: Gets releaseDefinitionReference which specifies the reference of the release definition to which this release environment is associated.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param release_description: Gets release description.
:type release_description: str
:param release_id: Gets release id.
:type release_id: int
:param scheduled_deployment_time: Gets schedule deployment time of release environment.
:type scheduled_deployment_time: datetime
:param schedules: Gets list of schedules.
:type schedules: list of :class:`ReleaseSchedule <azure.devops.v7_1.release.models.ReleaseSchedule>`
:param status: Gets environment status.
:type status: object
:param time_to_deploy: Gets time to deploy.
:type time_to_deploy: float
:param trigger_reason: Gets trigger reason.
:type trigger_reason: str
:param variable_groups: Gets the list of variable groups.
:type variable_groups: list of :class:`VariableGroup <azure.devops.v7_1.release.models.VariableGroup>`
:param variables: Gets the dictionary of variables.
:type variables: dict
:param workflow_tasks: Gets list of workflow tasks.
:type workflow_tasks: list of :class:`WorkflowTask <azure.devops.v7_1.release.models.WorkflowTask>`
"""
_attribute_map = {
'conditions': {'key': 'conditions', 'type': '[ReleaseCondition]'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'definition_environment_id': {'key': 'definitionEnvironmentId', 'type': 'int'},
'demands': {'key': 'demands', 'type': '[object]'},
'deploy_phases_snapshot': {'key': 'deployPhasesSnapshot', 'type': '[object]'},
'deploy_steps': {'key': 'deploySteps', 'type': '[DeploymentAttempt]'},
'environment_options': {'key': 'environmentOptions', 'type': 'EnvironmentOptions'},
'id': {'key': 'id', 'type': 'int'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'next_scheduled_utc_time': {'key': 'nextScheduledUtcTime', 'type': 'iso-8601'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'post_approvals_snapshot': {'key': 'postApprovalsSnapshot', 'type': 'ReleaseDefinitionApprovals'},
'post_deploy_approvals': {'key': 'postDeployApprovals', 'type': '[ReleaseApproval]'},
'post_deployment_gates_snapshot': {'key': 'postDeploymentGatesSnapshot', 'type': 'ReleaseDefinitionGatesStep'},
'pre_approvals_snapshot': {'key': 'preApprovalsSnapshot', 'type': 'ReleaseDefinitionApprovals'},
'pre_deploy_approvals': {'key': 'preDeployApprovals', 'type': '[ReleaseApproval]'},
'pre_deployment_gates_snapshot': {'key': 'preDeploymentGatesSnapshot', 'type': 'ReleaseDefinitionGatesStep'},
'process_parameters': {'key': 'processParameters', 'type': 'ProcessParameters'},
'queue_id': {'key': 'queueId', 'type': 'int'},
'rank': {'key': 'rank', 'type': 'int'},
'release': {'key': 'release', 'type': 'ReleaseShallowReference'},
'release_created_by': {'key': 'releaseCreatedBy', 'type': 'IdentityRef'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'release_description': {'key': 'releaseDescription', 'type': 'str'},
'release_id': {'key': 'releaseId', 'type': 'int'},
'scheduled_deployment_time': {'key': 'scheduledDeploymentTime', 'type': 'iso-8601'},
'schedules': {'key': 'schedules', 'type': '[ReleaseSchedule]'},
'status': {'key': 'status', 'type': 'object'},
'time_to_deploy': {'key': 'timeToDeploy', 'type': 'float'},
'trigger_reason': {'key': 'triggerReason', 'type': 'str'},
'variable_groups': {'key': 'variableGroups', 'type': '[VariableGroup]'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'},
'workflow_tasks': {'key': 'workflowTasks', 'type': '[WorkflowTask]'}
}
def __init__(self, conditions=None, created_on=None, definition_environment_id=None, demands=None, deploy_phases_snapshot=None, deploy_steps=None, environment_options=None, id=None, modified_on=None, name=None, next_scheduled_utc_time=None, owner=None, post_approvals_snapshot=None, post_deploy_approvals=None, post_deployment_gates_snapshot=None, pre_approvals_snapshot=None, pre_deploy_approvals=None, pre_deployment_gates_snapshot=None, process_parameters=None, queue_id=None, rank=None, release=None, release_created_by=None, release_definition=None, release_description=None, release_id=None, scheduled_deployment_time=None, schedules=None, status=None, time_to_deploy=None, trigger_reason=None, variable_groups=None, variables=None, workflow_tasks=None):
super(ReleaseEnvironment, self).__init__()
self.conditions = conditions
self.created_on = created_on
self.definition_environment_id = definition_environment_id
self.demands = demands
self.deploy_phases_snapshot = deploy_phases_snapshot
self.deploy_steps = deploy_steps
self.environment_options = environment_options
self.id = id
self.modified_on = modified_on
self.name = name
self.next_scheduled_utc_time = next_scheduled_utc_time
self.owner = owner
self.post_approvals_snapshot = post_approvals_snapshot
self.post_deploy_approvals = post_deploy_approvals
self.post_deployment_gates_snapshot = post_deployment_gates_snapshot
self.pre_approvals_snapshot = pre_approvals_snapshot
self.pre_deploy_approvals = pre_deploy_approvals
self.pre_deployment_gates_snapshot = pre_deployment_gates_snapshot
self.process_parameters = process_parameters
self.queue_id = queue_id
self.rank = rank
self.release = release
self.release_created_by = release_created_by
self.release_definition = release_definition
self.release_description = release_description
self.release_id = release_id
self.scheduled_deployment_time = scheduled_deployment_time
self.schedules = schedules
self.status = status
self.time_to_deploy = time_to_deploy
self.trigger_reason = trigger_reason
self.variable_groups = variable_groups
self.variables = variables
self.workflow_tasks = workflow_tasks
class ReleaseEnvironmentShallowReference(Model):
"""
:param _links: Gets the links to related resources, APIs, and views for the release environment.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param id: Gets the unique identifier of release environment.
:type id: int
:param name: Gets or sets the name of the release environment.
:type name: str
:param url: Gets the REST API url to access the release environment.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None, url=None):
super(ReleaseEnvironmentShallowReference, self).__init__()
self._links = _links
self.id = id
self.name = name
self.url = url
class ReleaseEnvironmentUpdateMetadata(Model):
"""
:param comment: Gets or sets comment.
:type comment: str
:param scheduled_deployment_time: Gets or sets scheduled deployment time.
:type scheduled_deployment_time: datetime
:param status: Gets or sets status of environment.
:type status: object
:param variables: Sets list of environment variables to be overridden at deployment time.
:type variables: dict
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'scheduled_deployment_time': {'key': 'scheduledDeploymentTime', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, comment=None, scheduled_deployment_time=None, status=None, variables=None):
super(ReleaseEnvironmentUpdateMetadata, self).__init__()
self.comment = comment
self.scheduled_deployment_time = scheduled_deployment_time
self.status = status
self.variables = variables
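# Illustrative sketch (not part of the generated API surface): the payload a caller might
# pass to the release client's update_release_environment operation to start a deployment.
# The status value is serialized as-is; 'inProgress' is an assumed EnvironmentStatus string
# and the comment is a placeholder, both shown for illustration only.
def _example_release_environment_update_metadata():
    return ReleaseEnvironmentUpdateMetadata(
        comment='Deploy to staging',
        status='inProgress')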
class ReleaseGates(Model):
"""
:param deployment_jobs: Contains the gates job details of each evaluation.
:type deployment_jobs: list of :class:`DeploymentJob <azure.devops.v7_1.release.models.DeploymentJob>`
:param id: ID of release gates.
:type id: int
:param ignored_gates: List of ignored gates.
:type ignored_gates: list of :class:`IgnoredGate <azure.devops.v7_1.release.models.IgnoredGate>`
:param last_modified_on: Gates last modified time.
:type last_modified_on: datetime
:param run_plan_id: Run plan ID of the gates.
:type run_plan_id: str
:param stabilization_completed_on: Gates stabilization completed date and time.
:type stabilization_completed_on: datetime
:param started_on: Gates evaluation started time.
:type started_on: datetime
:param status: Status of release gates.
:type status: object
:param succeeding_since: Date and time at which all gates executed successfully.
:type succeeding_since: datetime
"""
_attribute_map = {
'deployment_jobs': {'key': 'deploymentJobs', 'type': '[DeploymentJob]'},
'id': {'key': 'id', 'type': 'int'},
'ignored_gates': {'key': 'ignoredGates', 'type': '[IgnoredGate]'},
'last_modified_on': {'key': 'lastModifiedOn', 'type': 'iso-8601'},
'run_plan_id': {'key': 'runPlanId', 'type': 'str'},
'stabilization_completed_on': {'key': 'stabilizationCompletedOn', 'type': 'iso-8601'},
'started_on': {'key': 'startedOn', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'},
'succeeding_since': {'key': 'succeedingSince', 'type': 'iso-8601'}
}
def __init__(self, deployment_jobs=None, id=None, ignored_gates=None, last_modified_on=None, run_plan_id=None, stabilization_completed_on=None, started_on=None, status=None, succeeding_since=None):
super(ReleaseGates, self).__init__()
self.deployment_jobs = deployment_jobs
self.id = id
self.ignored_gates = ignored_gates
self.last_modified_on = last_modified_on
self.run_plan_id = run_plan_id
self.stabilization_completed_on = stabilization_completed_on
self.started_on = started_on
self.status = status
self.succeeding_since = succeeding_since
class ReleaseReference(Model):
"""
:param _links: Gets links to access the release.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param artifacts: Gets list of artifacts.
:type artifacts: list of :class:`Artifact <azure.devops.v7_1.release.models.Artifact>`
:param created_by: Gets the identity who created release.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_on: Gets date on which this release was created.
:type created_on: datetime
:param description: Gets description.
:type description: str
:param id: ID of the Release.
:type id: int
:param modified_by: Gets the identity who modified release.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param name: Gets name of release.
:type name: str
:param reason: Gets reason for release.
:type reason: object
:param release_definition: Gets release definition shallow reference.
:type release_definition: :class:`ReleaseDefinitionShallowReference <azure.devops.v7_1.release.models.ReleaseDefinitionShallowReference>`
:param url:
:type url: str
:param web_access_uri:
:type web_access_uri: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'artifacts': {'key': 'artifacts', 'type': '[Artifact]'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'name': {'key': 'name', 'type': 'str'},
'reason': {'key': 'reason', 'type': 'object'},
'release_definition': {'key': 'releaseDefinition', 'type': 'ReleaseDefinitionShallowReference'},
'url': {'key': 'url', 'type': 'str'},
'web_access_uri': {'key': 'webAccessUri', 'type': 'str'}
}
def __init__(self, _links=None, artifacts=None, created_by=None, created_on=None, description=None, id=None, modified_by=None, name=None, reason=None, release_definition=None, url=None, web_access_uri=None):
super(ReleaseReference, self).__init__()
self._links = _links
self.artifacts = artifacts
self.created_by = created_by
self.created_on = created_on
self.description = description
self.id = id
self.modified_by = modified_by
self.name = name
self.reason = reason
self.release_definition = release_definition
self.url = url
self.web_access_uri = web_access_uri
class ReleaseRevision(Model):
"""
:param comment: Comment of the revision.
:type comment: str
:param definition_snapshot_revision: Release ID to which this revision belongs.
:type definition_snapshot_revision: int
:param changed_by: Gets or sets the identity who changed.
:type changed_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param changed_date: Change date of the revision.
:type changed_date: datetime
:param change_details: Change details of the revision.
:type change_details: str
:param change_type: Change type of the revision. Typical values are Add and Update.
:type change_type: str
:param release_id: Gets or sets the release ID to which this revision belongs.
:type release_id: int
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'definition_snapshot_revision': {'key': 'definitionSnapshotRevision', 'type': 'int'},
'changed_by': {'key': 'changedBy', 'type': 'IdentityRef'},
'changed_date': {'key': 'changedDate', 'type': 'iso-8601'},
'change_details': {'key': 'changeDetails', 'type': 'str'},
'change_type': {'key': 'changeType', 'type': 'str'},
'release_id': {'key': 'releaseId', 'type': 'int'}
}
def __init__(self, comment=None, definition_snapshot_revision=None, changed_by=None, changed_date=None, change_details=None, change_type=None, release_id=None):
super(ReleaseRevision, self).__init__()
self.comment = comment
self.definition_snapshot_revision = definition_snapshot_revision
self.changed_by = changed_by
self.changed_date = changed_date
self.change_details = change_details
self.change_type = change_type
self.release_id = release_id
class ReleaseSettings(Model):
"""
:param compliance_settings: Release Compliance settings.
:type compliance_settings: :class:`ComplianceSettings <azure.devops.v7_1.release.models.ComplianceSettings>`
:param retention_settings: Release retention settings.
:type retention_settings: :class:`RetentionSettings <azure.devops.v7_1.release.models.RetentionSettings>`
"""
_attribute_map = {
'compliance_settings': {'key': 'complianceSettings', 'type': 'ComplianceSettings'},
'retention_settings': {'key': 'retentionSettings', 'type': 'RetentionSettings'}
}
def __init__(self, compliance_settings=None, retention_settings=None):
super(ReleaseSettings, self).__init__()
self.compliance_settings = compliance_settings
self.retention_settings = retention_settings
class ReleaseShallowReference(Model):
"""
:param _links: Gets the links to related resources, APIs, and views for the release.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param id: Gets the unique identifier of release.
:type id: int
:param name: Gets or sets the name of the release.
:type name: str
:param url: Gets the REST API url to access the release.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None, url=None):
super(ReleaseShallowReference, self).__init__()
self._links = _links
self.id = id
self.name = name
self.url = url
class ReleaseSchedule(Model):
"""
:param days_to_release: Days of the week to release.
:type days_to_release: object
:param job_id: Team Foundation Job Definition Job Id.
:type job_id: str
:param schedule_only_with_changes: Flag to determine if this schedule should only trigger a release when the associated artifact or the release definition has changed.
:type schedule_only_with_changes: bool
:param start_hours: Local time zone hour to start.
:type start_hours: int
:param start_minutes: Local time zone minute to start.
:type start_minutes: int
:param time_zone_id: Time zone Id of release schedule, such as 'UTC'.
:type time_zone_id: str
"""
_attribute_map = {
'days_to_release': {'key': 'daysToRelease', 'type': 'object'},
'job_id': {'key': 'jobId', 'type': 'str'},
'schedule_only_with_changes': {'key': 'scheduleOnlyWithChanges', 'type': 'bool'},
'start_hours': {'key': 'startHours', 'type': 'int'},
'start_minutes': {'key': 'startMinutes', 'type': 'int'},
'time_zone_id': {'key': 'timeZoneId', 'type': 'str'}
}
def __init__(self, days_to_release=None, job_id=None, schedule_only_with_changes=None, start_hours=None, start_minutes=None, time_zone_id=None):
super(ReleaseSchedule, self).__init__()
self.days_to_release = days_to_release
self.job_id = job_id
self.schedule_only_with_changes = schedule_only_with_changes
self.start_hours = start_hours
self.start_minutes = start_minutes
self.time_zone_id = time_zone_id
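# Illustrative sketch (not part of the generated API surface): a weekday 06:30 schedule in
# the UTC time zone. days_to_release is serialized as-is; the comma-separated day names
# below are an assumed ScheduleDays representation used for illustration only.
def _example_release_schedule():
    return ReleaseSchedule(
        days_to_release='monday, tuesday, wednesday, thursday, friday',
        schedule_only_with_changes=True,   # skip the run when nothing has changed
        start_hours=6,
        start_minutes=30,
        time_zone_id='UTC')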
class ReleaseStartEnvironmentMetadata(Model):
"""
:param definition_environment_id: Sets release definition environment id.
:type definition_environment_id: int
:param variables: Sets list of environment variables to be overridden at deployment time.
:type variables: dict
"""
_attribute_map = {
'definition_environment_id': {'key': 'definitionEnvironmentId', 'type': 'int'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, definition_environment_id=None, variables=None):
super(ReleaseStartEnvironmentMetadata, self).__init__()
self.definition_environment_id = definition_environment_id
self.variables = variables
class ReleaseStartMetadata(Model):
"""
:param artifacts: Sets list of artifacts to create a release.
:type artifacts: list of :class:`ArtifactMetadata <azure.devops.v7_1.release.models.ArtifactMetadata>`
:param created_for: Optionally provide a requestor identity
:type created_for: str
:param definition_id: Sets definition Id to create a release.
:type definition_id: int
:param description: Sets description to create a release.
:type description: str
:param environments_metadata: Sets list of environments meta data.
:type environments_metadata: list of :class:`ReleaseStartEnvironmentMetadata <azure.devops.v7_1.release.models.ReleaseStartEnvironmentMetadata>`
:param is_draft: Sets 'true' to create release in draft mode, 'false' otherwise.
:type is_draft: bool
:param manual_environments: Sets list of environments to manual as condition.
:type manual_environments: list of str
:param properties:
:type properties: :class:`object <azure.devops.v7_1.release.models.object>`
:param reason: Sets reason to create a release.
:type reason: object
:param variables: Sets list of release variables to be overridden at deployment time.
:type variables: dict
"""
_attribute_map = {
'artifacts': {'key': 'artifacts', 'type': '[ArtifactMetadata]'},
'created_for': {'key': 'createdFor', 'type': 'str'},
'definition_id': {'key': 'definitionId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'environments_metadata': {'key': 'environmentsMetadata', 'type': '[ReleaseStartEnvironmentMetadata]'},
'is_draft': {'key': 'isDraft', 'type': 'bool'},
'manual_environments': {'key': 'manualEnvironments', 'type': '[str]'},
'properties': {'key': 'properties', 'type': 'object'},
'reason': {'key': 'reason', 'type': 'object'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, artifacts=None, created_for=None, definition_id=None, description=None, environments_metadata=None, is_draft=None, manual_environments=None, properties=None, reason=None, variables=None):
super(ReleaseStartMetadata, self).__init__()
self.artifacts = artifacts
self.created_for = created_for
self.definition_id = definition_id
self.description = description
self.environments_metadata = environments_metadata
self.is_draft = is_draft
self.manual_environments = manual_environments
self.properties = properties
self.reason = reason
self.variables = variables
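# Illustrative sketch (not part of the generated API surface): a minimal payload for
# creating a release. The definition ID and description are placeholders, and 'manual'
# is an assumed ReleaseReason string; the reason field is serialized as-is.
def _example_release_start_metadata():
    return ReleaseStartMetadata(
        definition_id=1,                    # placeholder release definition ID
        description='Release created from the Python client',
        is_draft=False,
        reason='manual',
        manual_environments=[])             # environments to hold for manual deployment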
class ReleaseTask(Model):
"""
:param agent_name: Agent name on which task executed.
:type agent_name: str
:param date_ended:
:type date_ended: datetime
:param date_started:
:type date_started: datetime
:param finish_time: Finish time of the release task.
:type finish_time: datetime
:param id: ID of the release task.
:type id: int
:param issues: List of issues that occurred during execution of the task.
:type issues: list of :class:`Issue <azure.devops.v7_1.release.models.Issue>`
:param line_count: Number of log lines the release task has.
:type line_count: long
:param log_url: Log URL of the task.
:type log_url: str
:param name: Name of the task.
:type name: str
:param percent_complete: Task execution completion percent.
:type percent_complete: int
:param rank: Rank of the release task.
:type rank: int
:param result_code: Result code of the task.
:type result_code: str
:param start_time: Start time of the release task.
:type start_time: datetime
:param status: Status of release task.
:type status: object
:param task: Workflow task reference.
:type task: :class:`WorkflowTaskReference <azure.devops.v7_1.release.models.WorkflowTaskReference>`
:param timeline_record_id: Timeline record ID of the release task.
:type timeline_record_id: str
"""
_attribute_map = {
'agent_name': {'key': 'agentName', 'type': 'str'},
'date_ended': {'key': 'dateEnded', 'type': 'iso-8601'},
'date_started': {'key': 'dateStarted', 'type': 'iso-8601'},
'finish_time': {'key': 'finishTime', 'type': 'iso-8601'},
'id': {'key': 'id', 'type': 'int'},
'issues': {'key': 'issues', 'type': '[Issue]'},
'line_count': {'key': 'lineCount', 'type': 'long'},
'log_url': {'key': 'logUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'percent_complete': {'key': 'percentComplete', 'type': 'int'},
'rank': {'key': 'rank', 'type': 'int'},
'result_code': {'key': 'resultCode', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'status': {'key': 'status', 'type': 'object'},
'task': {'key': 'task', 'type': 'WorkflowTaskReference'},
'timeline_record_id': {'key': 'timelineRecordId', 'type': 'str'}
}
def __init__(self, agent_name=None, date_ended=None, date_started=None, finish_time=None, id=None, issues=None, line_count=None, log_url=None, name=None, percent_complete=None, rank=None, result_code=None, start_time=None, status=None, task=None, timeline_record_id=None):
super(ReleaseTask, self).__init__()
self.agent_name = agent_name
self.date_ended = date_ended
self.date_started = date_started
self.finish_time = finish_time
self.id = id
self.issues = issues
self.line_count = line_count
self.log_url = log_url
self.name = name
self.percent_complete = percent_complete
self.rank = rank
self.result_code = result_code
self.start_time = start_time
self.status = status
self.task = task
self.timeline_record_id = timeline_record_id
class ReleaseTaskAttachment(Model):
"""
:param _links: Reference links of task.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param created_on: Date and time when it was created.
:type created_on: datetime
:param modified_by: Identity who modified.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param modified_on: Date and time when it was modified.
:type modified_on: datetime
:param name: Name of the task attachment.
:type name: str
:param record_id: Record ID of the task.
:type record_id: str
:param timeline_id: Timeline ID of the task.
:type timeline_id: str
:param type: Type of task attachment.
:type type: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'record_id': {'key': 'recordId', 'type': 'str'},
'timeline_id': {'key': 'timelineId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'}
}
def __init__(self, _links=None, created_on=None, modified_by=None, modified_on=None, name=None, record_id=None, timeline_id=None, type=None):
super(ReleaseTaskAttachment, self).__init__()
self._links = _links
self.created_on = created_on
self.modified_by = modified_by
self.modified_on = modified_on
self.name = name
self.record_id = record_id
self.timeline_id = timeline_id
self.type = type
class ReleaseUpdateMetadata(Model):
"""
:param comment: Sets comment for release.
:type comment: str
:param keep_forever: Set 'true' to exclude the release from retention policies.
:type keep_forever: bool
:param manual_environments: Sets list of manual environments.
:type manual_environments: list of str
:param name: Sets name of the release.
:type name: str
:param status: Sets status of the release.
:type status: object
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'keep_forever': {'key': 'keepForever', 'type': 'bool'},
'manual_environments': {'key': 'manualEnvironments', 'type': '[str]'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'object'}
}
def __init__(self, comment=None, keep_forever=None, manual_environments=None, name=None, status=None):
super(ReleaseUpdateMetadata, self).__init__()
self.comment = comment
self.keep_forever = keep_forever
self.manual_environments = manual_environments
self.name = name
self.status = status
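# Illustrative sketch (not part of the generated API surface): excluding a release from
# retention policies, optionally abandoning it at the same time. The status field is
# serialized as-is; 'abandoned' is an assumed ReleaseStatus string and the comment is a
# placeholder, both shown for illustration only.
def _example_release_update_metadata(abandon=False):
    return ReleaseUpdateMetadata(
        comment='Keep this release for the audit trail',
        keep_forever=True,
        status='abandoned' if abandon else None)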
class ReleaseWorkItemRef(Model):
"""
:param assignee:
:type assignee: str
:param id: Gets or sets the ID.
:type id: str
:param provider: Gets or sets the provider.
:type provider: str
:param state: Gets or sets the state.
:type state: str
:param title: Gets or sets the title.
:type title: str
:param type: Gets or sets the type.
:type type: str
:param url: Gets or sets the workitem url.
:type url: str
"""
_attribute_map = {
'assignee': {'key': 'assignee', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'provider': {'key': 'provider', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'title': {'key': 'title', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, assignee=None, id=None, provider=None, state=None, title=None, type=None, url=None):
super(ReleaseWorkItemRef, self).__init__()
self.assignee = assignee
self.id = id
self.provider = provider
self.state = state
self.title = title
self.type = type
self.url = url
class RetentionPolicy(Model):
"""
:param days_to_keep: Indicates the number of days to keep deployment.
:type days_to_keep: int
"""
_attribute_map = {
'days_to_keep': {'key': 'daysToKeep', 'type': 'int'}
}
def __init__(self, days_to_keep=None):
super(RetentionPolicy, self).__init__()
self.days_to_keep = days_to_keep
class RetentionSettings(Model):
"""
:param days_to_keep_deleted_releases: Number of days to keep deleted releases.
:type days_to_keep_deleted_releases: int
:param default_environment_retention_policy: Specifies the default environment retention policy.
:type default_environment_retention_policy: :class:`EnvironmentRetentionPolicy <azure.devops.v7_1.release.models.EnvironmentRetentionPolicy>`
:param maximum_environment_retention_policy: Specifies the maximum environment retention policy.
:type maximum_environment_retention_policy: :class:`EnvironmentRetentionPolicy <azure.devops.v7_1.release.models.EnvironmentRetentionPolicy>`
"""
_attribute_map = {
'days_to_keep_deleted_releases': {'key': 'daysToKeepDeletedReleases', 'type': 'int'},
'default_environment_retention_policy': {'key': 'defaultEnvironmentRetentionPolicy', 'type': 'EnvironmentRetentionPolicy'},
'maximum_environment_retention_policy': {'key': 'maximumEnvironmentRetentionPolicy', 'type': 'EnvironmentRetentionPolicy'}
}
def __init__(self, days_to_keep_deleted_releases=None, default_environment_retention_policy=None, maximum_environment_retention_policy=None):
super(RetentionSettings, self).__init__()
self.days_to_keep_deleted_releases = days_to_keep_deleted_releases
self.default_environment_retention_policy = default_environment_retention_policy
self.maximum_environment_retention_policy = maximum_environment_retention_policy
class SourcePullRequestVersion(Model):
"""
:param iteration_id: Pull Request Iteration Id for which the release will publish status.
:type iteration_id: str
:param pull_request_id: Pull Request Id for which the release will publish status.
:type pull_request_id: str
:param pull_request_merged_at: Date and time of the pull request merge creation. It is required to keep timeline record of Releases created by pull request.
:type pull_request_merged_at: datetime
:param source_branch: Source branch of the Pull Request.
:type source_branch: str
:param source_branch_commit_id: Source branch commit Id of the Pull Request for which the release will publish status.
:type source_branch_commit_id: str
:param target_branch: Target branch of the Pull Request.
:type target_branch: str
"""
_attribute_map = {
'iteration_id': {'key': 'iterationId', 'type': 'str'},
'pull_request_id': {'key': 'pullRequestId', 'type': 'str'},
'pull_request_merged_at': {'key': 'pullRequestMergedAt', 'type': 'iso-8601'},
'source_branch': {'key': 'sourceBranch', 'type': 'str'},
'source_branch_commit_id': {'key': 'sourceBranchCommitId', 'type': 'str'},
'target_branch': {'key': 'targetBranch', 'type': 'str'}
}
def __init__(self, iteration_id=None, pull_request_id=None, pull_request_merged_at=None, source_branch=None, source_branch_commit_id=None, target_branch=None):
super(SourcePullRequestVersion, self).__init__()
self.iteration_id = iteration_id
self.pull_request_id = pull_request_id
self.pull_request_merged_at = pull_request_merged_at
self.source_branch = source_branch
self.source_branch_commit_id = source_branch_commit_id
self.target_branch = target_branch
class SummaryMailSection(Model):
"""
:param html_content: Html content of summary mail.
:type html_content: str
:param rank: Rank of the summary mail.
:type rank: int
:param section_type: Summary mail section type. MailSectionType has section types.
:type section_type: object
:param title: Title of the summary mail.
:type title: str
"""
_attribute_map = {
'html_content': {'key': 'htmlContent', 'type': 'str'},
'rank': {'key': 'rank', 'type': 'int'},
'section_type': {'key': 'sectionType', 'type': 'object'},
'title': {'key': 'title', 'type': 'str'}
}
def __init__(self, html_content=None, rank=None, section_type=None, title=None):
super(SummaryMailSection, self).__init__()
self.html_content = html_content
self.rank = rank
self.section_type = section_type
self.title = title
class TaskInputDefinitionBase(Model):
"""
:param aliases:
:type aliases: list of str
:param default_value:
:type default_value: str
:param group_name:
:type group_name: str
:param help_mark_down:
:type help_mark_down: str
:param label:
:type label: str
:param name:
:type name: str
:param options:
:type options: dict
:param properties:
:type properties: dict
:param required:
:type required: bool
:param type:
:type type: str
:param validation:
:type validation: :class:`TaskInputValidation <azure.devops.v7_1.microsoft._team_foundation._distributed_task._common._contracts.models.TaskInputValidation>`
:param visible_rule:
:type visible_rule: str
"""
_attribute_map = {
'aliases': {'key': 'aliases', 'type': '[str]'},
'default_value': {'key': 'defaultValue', 'type': 'str'},
'group_name': {'key': 'groupName', 'type': 'str'},
'help_mark_down': {'key': 'helpMarkDown', 'type': 'str'},
'label': {'key': 'label', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'options': {'key': 'options', 'type': '{str}'},
'properties': {'key': 'properties', 'type': '{str}'},
'required': {'key': 'required', 'type': 'bool'},
'type': {'key': 'type', 'type': 'str'},
'validation': {'key': 'validation', 'type': 'TaskInputValidation'},
'visible_rule': {'key': 'visibleRule', 'type': 'str'}
}
def __init__(self, aliases=None, default_value=None, group_name=None, help_mark_down=None, label=None, name=None, options=None, properties=None, required=None, type=None, validation=None, visible_rule=None):
super(TaskInputDefinitionBase, self).__init__()
self.aliases = aliases
self.default_value = default_value
self.group_name = group_name
self.help_mark_down = help_mark_down
self.label = label
self.name = name
self.options = options
self.properties = properties
self.required = required
self.type = type
self.validation = validation
self.visible_rule = visible_rule
class TaskInputValidation(Model):
"""
:param expression: Conditional expression
:type expression: str
:param message: Message explaining how user can correct if validation fails
:type message: str
"""
_attribute_map = {
'expression': {'key': 'expression', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'}
}
def __init__(self, expression=None, message=None):
super(TaskInputValidation, self).__init__()
self.expression = expression
self.message = message
class TaskSourceDefinitionBase(Model):
"""
:param auth_key:
:type auth_key: str
:param endpoint:
:type endpoint: str
:param key_selector:
:type key_selector: str
:param selector:
:type selector: str
:param target:
:type target: str
"""
_attribute_map = {
'auth_key': {'key': 'authKey', 'type': 'str'},
'endpoint': {'key': 'endpoint', 'type': 'str'},
'key_selector': {'key': 'keySelector', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'}
}
def __init__(self, auth_key=None, endpoint=None, key_selector=None, selector=None, target=None):
super(TaskSourceDefinitionBase, self).__init__()
self.auth_key = auth_key
self.endpoint = endpoint
self.key_selector = key_selector
self.selector = selector
self.target = target
class VariableGroup(Model):
"""
:param created_by: Gets or sets the identity who created.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param description: Gets or sets description.
:type description: str
:param id: Gets the unique identifier of this field.
:type id: int
:param is_shared: Denotes if a variable group is shared with other projects or not.
:type is_shared: bool
:param modified_by: Gets or sets the identity who modified.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param name: Gets or sets name.
:type name: str
:param provider_data: Gets or sets provider data.
:type provider_data: :class:`VariableGroupProviderData <azure.devops.v7_1.release.models.VariableGroupProviderData>`
:param type: Gets or sets type.
:type type: str
:param variable_group_project_references: All project references where the variable group is shared with other projects.
:type variable_group_project_references: list of :class:`VariableGroupProjectReference <azure.devops.v7_1.release.models.VariableGroupProjectReference>`
:param variables: Gets and sets the dictionary of variables.
:type variables: dict
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'is_shared': {'key': 'isShared', 'type': 'bool'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'provider_data': {'key': 'providerData', 'type': 'VariableGroupProviderData'},
'type': {'key': 'type', 'type': 'str'},
'variable_group_project_references': {'key': 'variableGroupProjectReferences', 'type': '[VariableGroupProjectReference]'},
'variables': {'key': 'variables', 'type': '{VariableValue}'}
}
def __init__(self, created_by=None, created_on=None, description=None, id=None, is_shared=None, modified_by=None, modified_on=None, name=None, provider_data=None, type=None, variable_group_project_references=None, variables=None):
super(VariableGroup, self).__init__()
self.created_by = created_by
self.created_on = created_on
self.description = description
self.id = id
self.is_shared = is_shared
self.modified_by = modified_by
self.modified_on = modified_on
self.name = name
self.provider_data = provider_data
self.type = type
self.variable_group_project_references = variable_group_project_references
self.variables = variables
class VariableGroupProjectReference(Model):
"""
A variable group reference is a shallow reference to variable group.
:param description: Gets or sets description of the variable group.
:type description: str
:param name: Gets or sets name of the variable group.
:type name: str
:param project_reference: Gets or sets project reference of the variable group.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'}
}
def __init__(self, description=None, name=None, project_reference=None):
super(VariableGroupProjectReference, self).__init__()
self.description = description
self.name = name
self.project_reference = project_reference
class VariableGroupProviderData(Model):
"""
"""
_attribute_map = {
}
def __init__(self):
super(VariableGroupProviderData, self).__init__()
class VariableValue(Model):
"""
:param is_read_only: Gets or sets if the variable is read only or not.
:type is_read_only: bool
:param is_secret: Gets or sets whether the variable is secret or not.
:type is_secret: bool
:param value: Gets or sets the value.
:type value: str
"""
_attribute_map = {
'is_read_only': {'key': 'isReadOnly', 'type': 'bool'},
'is_secret': {'key': 'isSecret', 'type': 'bool'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, is_read_only=None, is_secret=None, value=None):
super(VariableValue, self).__init__()
self.is_read_only = is_read_only
self.is_secret = is_secret
self.value = value
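# Illustrative sketch (not part of the generated API surface): the shape of the variables
# dictionaries used by VariableGroup and the release models above. Keys are variable names
# and values are VariableValue instances; the names and values here are placeholders.
def _example_variables():
    return {
        'ConnectionString': VariableValue(value='Server=db;Database=app', is_secret=True),
        'BuildConfiguration': VariableValue(value='Release', is_read_only=True)}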
class WorkflowTask(Model):
"""
:param always_run: Gets or sets whether the task always runs or not.
:type always_run: bool
:param condition: Gets or sets the task condition.
:type condition: str
:param continue_on_error: Gets or sets whether the task continues to run on error or not.
:type continue_on_error: bool
:param definition_type: Gets or sets the task definition type. Example: 'Agent', 'DeploymentGroup', 'Server' or 'ServerGate'.
:type definition_type: str
:param enabled: Gets or sets whether the task is enabled or not.
:type enabled: bool
:param environment: Gets or sets the task environment variables.
:type environment: dict
:param inputs: Gets or sets the task inputs.
:type inputs: dict
:param name: Gets or sets the name of the task.
:type name: str
:param override_inputs: Gets or sets the task override inputs.
:type override_inputs: dict
:param ref_name: Gets or sets the reference name of the task.
:type ref_name: str
:param retry_count_on_task_failure: Gets or sets the task retryCount.
:type retry_count_on_task_failure: int
:param task_id: Gets or sets the ID of the task.
:type task_id: str
:param timeout_in_minutes: Gets or sets the task timeout.
:type timeout_in_minutes: int
:param version: Gets or sets the version of the task.
:type version: str
"""
_attribute_map = {
'always_run': {'key': 'alwaysRun', 'type': 'bool'},
'condition': {'key': 'condition', 'type': 'str'},
'continue_on_error': {'key': 'continueOnError', 'type': 'bool'},
'definition_type': {'key': 'definitionType', 'type': 'str'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'environment': {'key': 'environment', 'type': '{str}'},
'inputs': {'key': 'inputs', 'type': '{str}'},
'name': {'key': 'name', 'type': 'str'},
'override_inputs': {'key': 'overrideInputs', 'type': '{str}'},
'ref_name': {'key': 'refName', 'type': 'str'},
'retry_count_on_task_failure': {'key': 'retryCountOnTaskFailure', 'type': 'int'},
'task_id': {'key': 'taskId', 'type': 'str'},
'timeout_in_minutes': {'key': 'timeoutInMinutes', 'type': 'int'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, always_run=None, condition=None, continue_on_error=None, definition_type=None, enabled=None, environment=None, inputs=None, name=None, override_inputs=None, ref_name=None, retry_count_on_task_failure=None, task_id=None, timeout_in_minutes=None, version=None):
super(WorkflowTask, self).__init__()
self.always_run = always_run
self.condition = condition
self.continue_on_error = continue_on_error
self.definition_type = definition_type
self.enabled = enabled
self.environment = environment
self.inputs = inputs
self.name = name
self.override_inputs = override_inputs
self.ref_name = ref_name
self.retry_count_on_task_failure = retry_count_on_task_failure
self.task_id = task_id
self.timeout_in_minutes = timeout_in_minutes
self.version = version
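# Illustrative sketch (not part of the generated API surface): a single agent task as it
# could appear in a deploy phase or workflow. The task_id must be the GUID of a task
# installed in the organization; the GUID and the inputs below are placeholders.
def _example_workflow_task():
    return WorkflowTask(
        name='Run deployment script',
        task_id='00000000-0000-0000-0000-000000000000',   # placeholder GUID
        definition_type='Agent',
        version='2.*',
        enabled=True,
        continue_on_error=False,
        timeout_in_minutes=30,
        inputs={'script': 'echo deploying'})              # task-specific inputs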
class WorkflowTaskReference(Model):
"""
:param id: Task identifier.
:type id: str
:param name: Name of the task.
:type name: str
:param version: Version of the task.
:type version: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'}
}
def __init__(self, id=None, name=None, version=None):
super(WorkflowTaskReference, self).__init__()
self.id = id
self.name = name
self.version = version
class ReleaseDefinition(ReleaseDefinitionShallowReference):
"""
:param _links: Gets the links to related resources, APIs, and views for the release definition.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.release.models.ReferenceLinks>`
:param id: Gets the unique identifier of release definition.
:type id: int
:param name: Gets or sets the name of the release definition.
:type name: str
:param path: Gets or sets the path of the release definition.
:type path: str
:param project_reference: Gets or sets project reference.
:type project_reference: :class:`ProjectReference <azure.devops.v7_1.release.models.ProjectReference>`
:param url: Gets the REST API url to access the release definition.
:type url: str
:param artifacts: Gets or sets the list of artifacts.
:type artifacts: list of :class:`Artifact <azure.devops.v7_1.release.models.Artifact>`
:param comment: Gets or sets comment.
:type comment: str
:param created_by: Gets or sets the identity who created.
:type created_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param created_on: Gets date on which it got created.
:type created_on: datetime
:param description: Gets or sets the description.
:type description: str
:param environments: Gets or sets the list of environments.
:type environments: list of :class:`ReleaseDefinitionEnvironment <azure.devops.v7_1.release.models.ReleaseDefinitionEnvironment>`
:param is_deleted: Whether release definition is deleted.
:type is_deleted: bool
:param last_release: Gets the reference of last release.
:type last_release: :class:`ReleaseReference <azure.devops.v7_1.release.models.ReleaseReference>`
:param modified_by: Gets or sets the identity who modified.
:type modified_by: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
:param modified_on: Gets date on which it got modified.
:type modified_on: datetime
:param pipeline_process: Gets or sets pipeline process.
:type pipeline_process: :class:`PipelineProcess <azure.devops.v7_1.release.models.PipelineProcess>`
:param properties: Gets or sets properties.
:type properties: :class:`object <azure.devops.v7_1.release.models.object>`
:param release_name_format: Gets or sets the release name format.
:type release_name_format: str
:param retention_policy:
:type retention_policy: :class:`RetentionPolicy <azure.devops.v7_1.release.models.RetentionPolicy>`
:param revision: Gets the revision number.
:type revision: int
:param source: Gets or sets source of release definition.
:type source: object
:param tags: Gets or sets list of tags.
:type tags: list of str
:param triggers: Gets or sets the list of triggers.
:type triggers: list of :class:`object <azure.devops.v7_1.release.models.object>`
:param variable_groups: Gets or sets the list of variable groups.
:type variable_groups: list of int
:param variables: Gets or sets the dictionary of variables.
:type variables: dict
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'project_reference': {'key': 'projectReference', 'type': 'ProjectReference'},
'url': {'key': 'url', 'type': 'str'},
'artifacts': {'key': 'artifacts', 'type': '[Artifact]'},
'comment': {'key': 'comment', 'type': 'str'},
'created_by': {'key': 'createdBy', 'type': 'IdentityRef'},
'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
'description': {'key': 'description', 'type': 'str'},
'environments': {'key': 'environments', 'type': '[ReleaseDefinitionEnvironment]'},
'is_deleted': {'key': 'isDeleted', 'type': 'bool'},
'last_release': {'key': 'lastRelease', 'type': 'ReleaseReference'},
'modified_by': {'key': 'modifiedBy', 'type': 'IdentityRef'},
'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
'pipeline_process': {'key': 'pipelineProcess', 'type': 'PipelineProcess'},
'properties': {'key': 'properties', 'type': 'object'},
'release_name_format': {'key': 'releaseNameFormat', 'type': 'str'},
'retention_policy': {'key': 'retentionPolicy', 'type': 'RetentionPolicy'},
'revision': {'key': 'revision', 'type': 'int'},
'source': {'key': 'source', 'type': 'object'},
'tags': {'key': 'tags', 'type': '[str]'},
'triggers': {'key': 'triggers', 'type': '[object]'},
'variable_groups': {'key': 'variableGroups', 'type': '[int]'},
'variables': {'key': 'variables', 'type': '{ConfigurationVariableValue}'}
}
def __init__(self, _links=None, id=None, name=None, path=None, project_reference=None, url=None, artifacts=None, comment=None, created_by=None, created_on=None, description=None, environments=None, is_deleted=None, last_release=None, modified_by=None, modified_on=None, pipeline_process=None, properties=None, release_name_format=None, retention_policy=None, revision=None, source=None, tags=None, triggers=None, variable_groups=None, variables=None):
super(ReleaseDefinition, self).__init__(_links=_links, id=id, name=name, path=path, project_reference=project_reference, url=url)
self.artifacts = artifacts
self.comment = comment
self.created_by = created_by
self.created_on = created_on
self.description = description
self.environments = environments
self.is_deleted = is_deleted
self.last_release = last_release
self.modified_by = modified_by
self.modified_on = modified_on
self.pipeline_process = pipeline_process
self.properties = properties
self.release_name_format = release_name_format
self.retention_policy = retention_policy
self.revision = revision
self.source = source
self.tags = tags
self.triggers = triggers
self.variable_groups = variable_groups
self.variables = variables
class ReleaseDefinitionApprovalStep(ReleaseDefinitionEnvironmentStep):
"""
:param id: ID of the approval or deploy step.
:type id: int
:param approver: Gets and sets the approver.
:type approver: :class:`IdentityRef <azure.devops.v7_1.release.models.IdentityRef>`
    :param is_automated: Indicates whether the approval is automated.
    :type is_automated: bool
    :param is_notification_on: Indicates whether the approval notification is set.
:type is_notification_on: bool
:param rank: Gets or sets the rank of approval step.
:type rank: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'approver': {'key': 'approver', 'type': 'IdentityRef'},
'is_automated': {'key': 'isAutomated', 'type': 'bool'},
'is_notification_on': {'key': 'isNotificationOn', 'type': 'bool'},
'rank': {'key': 'rank', 'type': 'int'}
}
def __init__(self, id=None, approver=None, is_automated=None, is_notification_on=None, rank=None):
super(ReleaseDefinitionApprovalStep, self).__init__(id=id)
self.approver = approver
self.is_automated = is_automated
self.is_notification_on = is_notification_on
self.rank = rank
class ReleaseDefinitionDeployStep(ReleaseDefinitionEnvironmentStep):
"""
:param id: ID of the approval or deploy step.
:type id: int
:param tasks: The list of steps for this definition.
:type tasks: list of :class:`WorkflowTask <azure.devops.v7_1.release.models.WorkflowTask>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'tasks': {'key': 'tasks', 'type': '[WorkflowTask]'}
}
def __init__(self, id=None, tasks=None):
super(ReleaseDefinitionDeployStep, self).__init__(id=id)
self.tasks = tasks
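# Hypothetical usage sketch, not part of the generated module: building the
# approval and deploy steps of a release definition environment in memory.
# The identity display name is illustrative; see the IdentityRef model in this
# module for the full set of fields, and WorkflowTask for the task schema.
def _example_environment_steps():
    approval = ReleaseDefinitionApprovalStep(
        approver=IdentityRef(display_name='Release Approvers'),
        is_automated=False,
        is_notification_on=True,
        rank=1,
    )
    # tasks is a list of WorkflowTask objects; left empty in this sketch.
    deploy = ReleaseDefinitionDeployStep(tasks=[])
    return approval, deploy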
__all__ = [
'AgentArtifactDefinition',
'ApprovalOptions',
'Artifact',
'ArtifactMetadata',
'ArtifactSourceReference',
'ArtifactTriggerConfiguration',
'ArtifactTypeDefinition',
'ArtifactVersion',
'ArtifactVersionQueryResult',
'AuthorizationHeader',
'AutoTriggerIssue',
'BuildVersion',
'ComplianceSettings',
'Condition',
'ConfigurationVariableValue',
'DataSourceBindingBase',
'DefinitionEnvironmentReference',
'Deployment',
'DeploymentAttempt',
'DeploymentJob',
'DeploymentQueryParameters',
'EmailRecipients',
'EnvironmentExecutionPolicy',
'EnvironmentOptions',
'EnvironmentRetentionPolicy',
'EnvironmentTrigger',
'FavoriteItem',
'Folder',
'GateUpdateMetadata',
'GraphSubjectBase',
'Change',
'IdentityRef',
'IgnoredGate',
'InputDescriptor',
'InputValidation',
'InputValue',
'InputValues',
'InputValuesError',
'InputValuesQuery',
'Issue',
'MailMessage',
'ManualIntervention',
'ManualInterventionUpdateMetadata',
'Metric',
'OrgPipelineReleaseSettings',
'OrgPipelineReleaseSettingsUpdateParameters',
'PipelineProcess',
'ProcessParameters',
'ProjectPipelineReleaseSettings',
'ProjectPipelineReleaseSettingsUpdateParameters',
'ProjectReference',
'QueuedReleaseData',
'ReferenceLinks',
'Release',
'ReleaseApproval',
'ReleaseApprovalHistory',
'ReleaseCondition',
'ReleaseDefinitionApprovals',
'ReleaseDefinitionEnvironment',
'ReleaseDefinitionEnvironmentStep',
'ReleaseDefinitionEnvironmentSummary',
'ReleaseDefinitionEnvironmentTemplate',
'ReleaseDefinitionGate',
'ReleaseDefinitionGatesOptions',
'ReleaseDefinitionGatesStep',
'ReleaseDefinitionRevision',
'ReleaseDefinitionShallowReference',
'ReleaseDefinitionSummary',
'ReleaseDefinitionUndeleteParameter',
'ReleaseDeployPhase',
'ReleaseEnvironment',
'ReleaseEnvironmentShallowReference',
'ReleaseEnvironmentUpdateMetadata',
'ReleaseGates',
'ReleaseReference',
'ReleaseRevision',
'ReleaseSettings',
'ReleaseShallowReference',
'ReleaseSchedule',
'ReleaseStartEnvironmentMetadata',
'ReleaseStartMetadata',
'ReleaseTask',
'ReleaseTaskAttachment',
'ReleaseUpdateMetadata',
'ReleaseWorkItemRef',
'RetentionPolicy',
'RetentionSettings',
'SourcePullRequestVersion',
'SummaryMailSection',
'TaskInputDefinitionBase',
'TaskInputValidation',
'TaskSourceDefinitionBase',
'VariableGroup',
'VariableGroupProjectReference',
'VariableGroupProviderData',
'VariableValue',
'WorkflowTask',
'WorkflowTaskReference',
'ReleaseDefinition',
'ReleaseDefinitionApprovalStep',
'ReleaseDefinitionDeployStep',
]
# ---- End of file: azure-devops-python-api/azure-devops/azure/devops/v7_1/release/models.py ----
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class ServiceHooksClient(Client):
"""ServiceHooks
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(ServiceHooksClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = None
def get_consumer_action(self, consumer_id, consumer_action_id, publisher_id=None):
"""GetConsumerAction.
[Preview API] Get details about a specific consumer action.
:param str consumer_id: ID for a consumer.
        :param str consumer_action_id: ID of the consumer action.
:param str publisher_id:
:rtype: :class:`<ConsumerAction> <azure.devops.v7_1.service_hooks.models.ConsumerAction>`
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
if consumer_action_id is not None:
route_values['consumerActionId'] = self._serialize.url('consumer_action_id', consumer_action_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('ConsumerAction', response)
def list_consumer_actions(self, consumer_id, publisher_id=None):
"""ListConsumerActions.
[Preview API] Get a list of consumer actions for a specific consumer.
:param str consumer_id: ID for a consumer.
:param str publisher_id:
:rtype: [ConsumerAction]
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='c3428e90-7a69-4194-8ed8-0f153185ee0d',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[ConsumerAction]', self._unwrap_collection(response))
def get_consumer(self, consumer_id, publisher_id=None):
"""GetConsumer.
[Preview API] Get a specific consumer service. Optionally filter out consumer actions that do not support any event types for the specified publisher.
:param str consumer_id: ID for a consumer.
:param str publisher_id:
:rtype: :class:`<Consumer> <azure.devops.v7_1.service_hooks.models.Consumer>`
"""
route_values = {}
if consumer_id is not None:
route_values['consumerId'] = self._serialize.url('consumer_id', consumer_id, 'str')
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('Consumer', response)
def list_consumers(self, publisher_id=None):
"""ListConsumers.
[Preview API] Get a list of available service hook consumer services. Optionally filter by consumers that support at least one event type from the specific publisher.
:param str publisher_id:
:rtype: [Consumer]
"""
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='4301c514-5f34-4f5d-a145-f0ea7b5b7d19',
version='7.1-preview.1',
query_parameters=query_parameters)
return self._deserialize('[Consumer]', self._unwrap_collection(response))
def get_subscription_diagnostics(self, subscription_id):
"""GetSubscriptionDiagnostics.
[Preview API]
:param str subscription_id:
:rtype: :class:`<SubscriptionDiagnostics> <azure.devops.v7_1.service_hooks.models.SubscriptionDiagnostics>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
response = self._send(http_method='GET',
location_id='3b36bcb5-02ad-43c6-bbfa-6dfc6f8e9d68',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('SubscriptionDiagnostics', response)
def update_subscription_diagnostics(self, update_parameters, subscription_id):
"""UpdateSubscriptionDiagnostics.
[Preview API]
:param :class:`<UpdateSubscripitonDiagnosticsParameters> <azure.devops.v7_1.service_hooks.models.UpdateSubscripitonDiagnosticsParameters>` update_parameters:
:param str subscription_id:
:rtype: :class:`<SubscriptionDiagnostics> <azure.devops.v7_1.service_hooks.models.SubscriptionDiagnostics>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
content = self._serialize.body(update_parameters, 'UpdateSubscripitonDiagnosticsParameters')
response = self._send(http_method='PUT',
location_id='3b36bcb5-02ad-43c6-bbfa-6dfc6f8e9d68',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('SubscriptionDiagnostics', response)
def get_event_type(self, publisher_id, event_type_id):
"""GetEventType.
[Preview API] Get a specific event type.
:param str publisher_id: ID for a publisher.
:param str event_type_id:
:rtype: :class:`<EventTypeDescriptor> <azure.devops.v7_1.service_hooks.models.EventTypeDescriptor>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
if event_type_id is not None:
route_values['eventTypeId'] = self._serialize.url('event_type_id', event_type_id, 'str')
response = self._send(http_method='GET',
location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('EventTypeDescriptor', response)
def list_event_types(self, publisher_id):
"""ListEventTypes.
[Preview API] Get the event types for a specific publisher.
:param str publisher_id: ID for a publisher.
:rtype: [EventTypeDescriptor]
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='db4777cd-8e08-4a84-8ba3-c974ea033718',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('[EventTypeDescriptor]', self._unwrap_collection(response))
def get_notification(self, subscription_id, notification_id):
"""GetNotification.
[Preview API] Get a specific notification for a subscription.
:param str subscription_id: ID for a subscription.
:param int notification_id:
:rtype: :class:`<Notification> <azure.devops.v7_1.service_hooks.models.Notification>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
if notification_id is not None:
route_values['notificationId'] = self._serialize.url('notification_id', notification_id, 'int')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('Notification', response)
def get_notifications(self, subscription_id, max_results=None, status=None, result=None):
"""GetNotifications.
[Preview API] Get a list of notifications for a specific subscription. A notification includes details about the event, the request to and the response from the consumer service.
:param str subscription_id: ID for a subscription.
:param int max_results: Maximum number of notifications to return. Default is **100**.
:param str status: Get only notifications with this status.
:param str result: Get only notifications with this result type.
:rtype: [Notification]
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
query_parameters = {}
if max_results is not None:
query_parameters['maxResults'] = self._serialize.query('max_results', max_results, 'int')
if status is not None:
query_parameters['status'] = self._serialize.query('status', status, 'str')
if result is not None:
query_parameters['result'] = self._serialize.query('result', result, 'str')
response = self._send(http_method='GET',
location_id='0c62d343-21b0-4732-997b-017fde84dc28',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Notification]', self._unwrap_collection(response))
def query_notifications(self, query):
"""QueryNotifications.
[Preview API] Query for notifications. A notification includes details about the event, the request to and the response from the consumer service.
:param :class:`<NotificationsQuery> <azure.devops.v7_1.service_hooks.models.NotificationsQuery>` query:
:rtype: :class:`<NotificationsQuery> <azure.devops.v7_1.service_hooks.models.NotificationsQuery>`
"""
content = self._serialize.body(query, 'NotificationsQuery')
response = self._send(http_method='POST',
location_id='1a57562f-160a-4b5c-9185-905e95b39d36',
version='7.1-preview.1',
content=content)
return self._deserialize('NotificationsQuery', response)
def query_input_values(self, input_values_query, publisher_id):
"""QueryInputValues.
[Preview API]
:param :class:`<InputValuesQuery> <azure.devops.v7_1.service_hooks.models.InputValuesQuery>` input_values_query:
:param str publisher_id:
:rtype: :class:`<InputValuesQuery> <azure.devops.v7_1.service_hooks.models.InputValuesQuery>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
content = self._serialize.body(input_values_query, 'InputValuesQuery')
response = self._send(http_method='POST',
location_id='d815d352-a566-4dc1-a3e3-fd245acf688c',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('InputValuesQuery', response)
def get_publisher(self, publisher_id):
"""GetPublisher.
[Preview API] Get a specific service hooks publisher.
:param str publisher_id: ID for a publisher.
:rtype: :class:`<Publisher> <azure.devops.v7_1.service_hooks.models.Publisher>`
"""
route_values = {}
if publisher_id is not None:
route_values['publisherId'] = self._serialize.url('publisher_id', publisher_id, 'str')
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('Publisher', response)
def list_publishers(self):
"""ListPublishers.
[Preview API] Get a list of publishers.
:rtype: [Publisher]
"""
response = self._send(http_method='GET',
location_id='1e83a210-5b53-43bc-90f0-d476a4e5d731',
version='7.1-preview.1')
return self._deserialize('[Publisher]', self._unwrap_collection(response))
def query_publishers(self, query):
"""QueryPublishers.
[Preview API] Query for service hook publishers.
:param :class:`<PublishersQuery> <azure.devops.v7_1.service_hooks.models.PublishersQuery>` query:
:rtype: :class:`<PublishersQuery> <azure.devops.v7_1.service_hooks.models.PublishersQuery>`
"""
content = self._serialize.body(query, 'PublishersQuery')
response = self._send(http_method='POST',
location_id='99b44a8a-65a8-4670-8f3e-e7f7842cce64',
version='7.1-preview.1',
content=content)
return self._deserialize('PublishersQuery', response)
def create_subscription(self, subscription):
"""CreateSubscription.
[Preview API] Create a subscription.
:param :class:`<Subscription> <azure.devops.v7_1.service_hooks.models.Subscription>` subscription: Subscription to be created.
:rtype: :class:`<Subscription> <azure.devops.v7_1.service_hooks.models.Subscription>`
"""
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='POST',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='7.1-preview.1',
content=content)
return self._deserialize('Subscription', response)
def delete_subscription(self, subscription_id):
"""DeleteSubscription.
[Preview API] Delete a specific service hooks subscription.
:param str subscription_id: ID for a subscription.
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
self._send(http_method='DELETE',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='7.1-preview.1',
route_values=route_values)
def get_subscription(self, subscription_id):
"""GetSubscription.
[Preview API] Get a specific service hooks subscription.
:param str subscription_id: ID for a subscription.
:rtype: :class:`<Subscription> <azure.devops.v7_1.service_hooks.models.Subscription>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
response = self._send(http_method='GET',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('Subscription', response)
def list_subscriptions(self, publisher_id=None, event_type=None, consumer_id=None, consumer_action_id=None):
"""ListSubscriptions.
[Preview API] Get a list of subscriptions.
        :param str publisher_id: ID for a publisher.
        :param str event_type: The event type to filter on (if any).
        :param str consumer_id: ID for a consumer.
        :param str consumer_action_id: ID of the consumer action.
:rtype: [Subscription]
"""
query_parameters = {}
if publisher_id is not None:
query_parameters['publisherId'] = self._serialize.query('publisher_id', publisher_id, 'str')
if event_type is not None:
query_parameters['eventType'] = self._serialize.query('event_type', event_type, 'str')
if consumer_id is not None:
query_parameters['consumerId'] = self._serialize.query('consumer_id', consumer_id, 'str')
if consumer_action_id is not None:
query_parameters['consumerActionId'] = self._serialize.query('consumer_action_id', consumer_action_id, 'str')
response = self._send(http_method='GET',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='7.1-preview.1',
query_parameters=query_parameters)
return self._deserialize('[Subscription]', self._unwrap_collection(response))
def replace_subscription(self, subscription, subscription_id=None):
"""ReplaceSubscription.
        [Preview API] Update a subscription.
        :param :class:`<Subscription> <azure.devops.v7_1.service_hooks.models.Subscription>` subscription: The subscription to update.
        :param str subscription_id: ID for a subscription that you wish to update.
:rtype: :class:`<Subscription> <azure.devops.v7_1.service_hooks.models.Subscription>`
"""
route_values = {}
if subscription_id is not None:
route_values['subscriptionId'] = self._serialize.url('subscription_id', subscription_id, 'str')
content = self._serialize.body(subscription, 'Subscription')
response = self._send(http_method='PUT',
location_id='fc50d02a-849f-41fb-8af1-0a5216103269',
version='7.1-preview.1',
route_values=route_values,
content=content)
return self._deserialize('Subscription', response)
def create_subscriptions_query(self, query):
"""CreateSubscriptionsQuery.
[Preview API] Query for service hook subscriptions.
:param :class:`<SubscriptionsQuery> <azure.devops.v7_1.service_hooks.models.SubscriptionsQuery>` query:
:rtype: :class:`<SubscriptionsQuery> <azure.devops.v7_1.service_hooks.models.SubscriptionsQuery>`
"""
content = self._serialize.body(query, 'SubscriptionsQuery')
response = self._send(http_method='POST',
location_id='c7c3c1cf-9e05-4c0d-a425-a0f922c2c6ed',
version='7.1-preview.1',
content=content)
return self._deserialize('SubscriptionsQuery', response)
def create_test_notification(self, test_notification, use_real_data=None):
"""CreateTestNotification.
[Preview API] Sends a test notification. This is useful for verifying the configuration of an updated or new service hooks subscription.
:param :class:`<Notification> <azure.devops.v7_1.service_hooks.models.Notification>` test_notification:
:param bool use_real_data: Only allow testing with real data in existing subscriptions.
:rtype: :class:`<Notification> <azure.devops.v7_1.service_hooks.models.Notification>`
"""
query_parameters = {}
if use_real_data is not None:
query_parameters['useRealData'] = self._serialize.query('use_real_data', use_real_data, 'bool')
content = self._serialize.body(test_notification, 'Notification')
response = self._send(http_method='POST',
location_id='1139462c-7e27-4524-a997-31b9b73551fe',
version='7.1-preview.1',
query_parameters=query_parameters,
content=content)
return self._deserialize('Notification', response)
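# Hypothetical usage sketch, not part of the generated client: authenticate with
# a personal access token and exercise a few of the calls above. The organization
# URL, PAT placeholder, and the Subscription/Notification field values shown are
# assumptions for illustration; consult the models in
# azure.devops.v7_1.service_hooks.models for the exact attributes your
# publisher/consumer pair requires.
def _example_service_hooks_usage():
    from msrest.authentication import BasicAuthentication
    creds = BasicAuthentication('', 'personal-access-token')  # PAT goes in the password slot
    client = ServiceHooksClient(base_url='https://dev.azure.com/your-organization', creds=creds)
    # Discover what can raise events and what can consume them.
    publishers = client.list_publishers()
    consumers = client.list_consumers()
    # Create a web hook subscription for new work items (field names assumed).
    subscription = models.Subscription(
        publisher_id='tfs',
        event_type='workitem.created',
        consumer_id='webHooks',
        consumer_action_id='httpRequest',
        consumer_inputs={'url': 'https://example.com/hook'},
    )
    created = client.create_subscription(subscription)
    # Verify the new subscription using real data from it (field name assumed).
    client.create_test_notification(models.Notification(subscription_id=created.id), use_real_data=True)
    return publishers, consumers, created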
# ---- End of file: azure-devops-python-api/azure-devops/azure/devops/v7_1/service_hooks/service_hooks_client.py ----
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class BuildDefinitionReference(Model):
"""
The build definition reference resource
:param id: ID of the build definition
:type id: int
:param name: Name of the build definition
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(BuildDefinitionReference, self).__init__()
self.id = id
self.name = name
class CloneOperationCommonResponse(Model):
"""
Common Response for clone operation
:param clone_statistics: Various statistics related to the clone operation
:type clone_statistics: :class:`CloneStatistics <azure.devops.v7_1.test_plan.models.CloneStatistics>`
    :param completion_date: Completion date of the operation
:type completion_date: datetime
    :param creation_date: Creation date of the operation
:type creation_date: datetime
:param links: Reference links
:type links: :class:`ReferenceLinks <azure.devops.v7_1.test_plan.models.ReferenceLinks>`
:param message: Message related to the job
:type message: str
:param op_id: Clone operation Id
:type op_id: int
:param state: Clone operation state
:type state: object
"""
_attribute_map = {
'clone_statistics': {'key': 'cloneStatistics', 'type': 'CloneStatistics'},
'completion_date': {'key': 'completionDate', 'type': 'iso-8601'},
'creation_date': {'key': 'creationDate', 'type': 'iso-8601'},
'links': {'key': 'links', 'type': 'ReferenceLinks'},
'message': {'key': 'message', 'type': 'str'},
'op_id': {'key': 'opId', 'type': 'int'},
'state': {'key': 'state', 'type': 'object'}
}
def __init__(self, clone_statistics=None, completion_date=None, creation_date=None, links=None, message=None, op_id=None, state=None):
super(CloneOperationCommonResponse, self).__init__()
self.clone_statistics = clone_statistics
self.completion_date = completion_date
self.creation_date = creation_date
self.links = links
self.message = message
self.op_id = op_id
self.state = state
class CloneOptions(Model):
"""
Clone options for cloning the test suite.
    :param clone_requirements: If set to true, requirements will be cloned
:type clone_requirements: bool
:param copy_all_suites: copy all suites from a source plan
:type copy_all_suites: bool
:param copy_ancestor_hierarchy: copy ancestor hierarchy
:type copy_ancestor_hierarchy: bool
:param destination_work_item_type: Name of the workitem type of the clone
:type destination_work_item_type: str
:param override_parameters: Key value pairs where the key value is overridden by the value.
:type override_parameters: dict
    :param related_link_comment: Comment on the link that will link the new clone test case to the original. Set null for no comment.
:type related_link_comment: str
"""
_attribute_map = {
'clone_requirements': {'key': 'cloneRequirements', 'type': 'bool'},
'copy_all_suites': {'key': 'copyAllSuites', 'type': 'bool'},
'copy_ancestor_hierarchy': {'key': 'copyAncestorHierarchy', 'type': 'bool'},
'destination_work_item_type': {'key': 'destinationWorkItemType', 'type': 'str'},
'override_parameters': {'key': 'overrideParameters', 'type': '{str}'},
'related_link_comment': {'key': 'relatedLinkComment', 'type': 'str'}
}
def __init__(self, clone_requirements=None, copy_all_suites=None, copy_ancestor_hierarchy=None, destination_work_item_type=None, override_parameters=None, related_link_comment=None):
super(CloneOptions, self).__init__()
self.clone_requirements = clone_requirements
self.copy_all_suites = copy_all_suites
self.copy_ancestor_hierarchy = copy_ancestor_hierarchy
self.destination_work_item_type = destination_work_item_type
self.override_parameters = override_parameters
self.related_link_comment = related_link_comment
class CloneStatistics(Model):
"""
Clone Statistics Details.
:param cloned_requirements_count: Number of requirements cloned so far.
:type cloned_requirements_count: int
:param cloned_shared_steps_count: Number of shared steps cloned so far.
:type cloned_shared_steps_count: int
:param cloned_test_cases_count: Number of test cases cloned so far
:type cloned_test_cases_count: int
:param total_requirements_count: Total number of requirements to be cloned
:type total_requirements_count: int
:param total_test_cases_count: Total number of test cases to be cloned
:type total_test_cases_count: int
"""
_attribute_map = {
'cloned_requirements_count': {'key': 'clonedRequirementsCount', 'type': 'int'},
'cloned_shared_steps_count': {'key': 'clonedSharedStepsCount', 'type': 'int'},
'cloned_test_cases_count': {'key': 'clonedTestCasesCount', 'type': 'int'},
'total_requirements_count': {'key': 'totalRequirementsCount', 'type': 'int'},
'total_test_cases_count': {'key': 'totalTestCasesCount', 'type': 'int'}
}
def __init__(self, cloned_requirements_count=None, cloned_shared_steps_count=None, cloned_test_cases_count=None, total_requirements_count=None, total_test_cases_count=None):
super(CloneStatistics, self).__init__()
self.cloned_requirements_count = cloned_requirements_count
self.cloned_shared_steps_count = cloned_shared_steps_count
self.cloned_test_cases_count = cloned_test_cases_count
self.total_requirements_count = total_requirements_count
self.total_test_cases_count = total_test_cases_count
class CloneTestCaseOperationInformation(Model):
"""
:param clone_operation_response: Various information related to the clone
:type clone_operation_response: :class:`CloneOperationCommonResponse <azure.devops.v7_1.test_plan.models.CloneOperationCommonResponse>`
:param clone_options: Test Plan Clone create parameters
:type clone_options: :class:`CloneTestCaseOptions <azure.devops.v7_1.test_plan.models.CloneTestCaseOptions>`
:param destination_test_suite: Information of destination Test Suite
:type destination_test_suite: :class:`TestSuiteReferenceWithProject <azure.devops.v7_1.test_plan.models.TestSuiteReferenceWithProject>`
:param source_test_suite: Information of source Test Suite
:type source_test_suite: :class:`SourceTestSuiteResponse <azure.devops.v7_1.test_plan.models.SourceTestSuiteResponse>`
"""
_attribute_map = {
'clone_operation_response': {'key': 'cloneOperationResponse', 'type': 'CloneOperationCommonResponse'},
'clone_options': {'key': 'cloneOptions', 'type': 'CloneTestCaseOptions'},
'destination_test_suite': {'key': 'destinationTestSuite', 'type': 'TestSuiteReferenceWithProject'},
'source_test_suite': {'key': 'sourceTestSuite', 'type': 'SourceTestSuiteResponse'}
}
def __init__(self, clone_operation_response=None, clone_options=None, destination_test_suite=None, source_test_suite=None):
super(CloneTestCaseOperationInformation, self).__init__()
self.clone_operation_response = clone_operation_response
self.clone_options = clone_options
self.destination_test_suite = destination_test_suite
self.source_test_suite = source_test_suite
class CloneTestCaseOptions(Model):
"""
:param include_attachments: If set to true, include the attachments
:type include_attachments: bool
:param include_links: If set to true, include the links
:type include_links: bool
    :param related_link_comment: Comment on the link that will link the new clone test case to the original. Set null for no comment.
:type related_link_comment: str
"""
_attribute_map = {
'include_attachments': {'key': 'includeAttachments', 'type': 'bool'},
'include_links': {'key': 'includeLinks', 'type': 'bool'},
'related_link_comment': {'key': 'relatedLinkComment', 'type': 'str'}
}
def __init__(self, include_attachments=None, include_links=None, related_link_comment=None):
super(CloneTestCaseOptions, self).__init__()
self.include_attachments = include_attachments
self.include_links = include_links
self.related_link_comment = related_link_comment
class CloneTestCaseParams(Model):
"""
Parameters for Test Suite clone operation
:param clone_options: Test Case Clone create parameters
:type clone_options: :class:`CloneTestCaseOptions <azure.devops.v7_1.test_plan.models.CloneTestCaseOptions>`
:param destination_test_plan: Information about destination Test Plan
:type destination_test_plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param destination_test_suite: Information about destination Test Suite
:type destination_test_suite: :class:`DestinationTestSuiteInfo <azure.devops.v7_1.test_plan.models.DestinationTestSuiteInfo>`
:param source_test_plan: Information about source Test Plan
:type source_test_plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param source_test_suite: Information about source Test Suite
:type source_test_suite: :class:`SourceTestSuiteInfo <azure.devops.v7_1.test_plan.models.SourceTestSuiteInfo>`
:param test_case_ids: Test Case IDs
:type test_case_ids: list of int
"""
_attribute_map = {
'clone_options': {'key': 'cloneOptions', 'type': 'CloneTestCaseOptions'},
'destination_test_plan': {'key': 'destinationTestPlan', 'type': 'TestPlanReference'},
'destination_test_suite': {'key': 'destinationTestSuite', 'type': 'DestinationTestSuiteInfo'},
'source_test_plan': {'key': 'sourceTestPlan', 'type': 'TestPlanReference'},
'source_test_suite': {'key': 'sourceTestSuite', 'type': 'SourceTestSuiteInfo'},
'test_case_ids': {'key': 'testCaseIds', 'type': '[int]'}
}
def __init__(self, clone_options=None, destination_test_plan=None, destination_test_suite=None, source_test_plan=None, source_test_suite=None, test_case_ids=None):
super(CloneTestCaseParams, self).__init__()
self.clone_options = clone_options
self.destination_test_plan = destination_test_plan
self.destination_test_suite = destination_test_suite
self.source_test_plan = source_test_plan
self.source_test_suite = source_test_suite
self.test_case_ids = test_case_ids
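# Hypothetical usage sketch, not part of the generated module: parameters for
# copying two test cases into another suite while keeping attachments and links.
# The ids and project name are illustrative; source/destination test plan
# references are omitted in this sketch.
def _example_clone_test_case_params():
    return CloneTestCaseParams(
        clone_options=CloneTestCaseOptions(
            include_attachments=True,
            include_links=True,
            related_link_comment='Cloned for the regression plan',
        ),
        destination_test_suite=DestinationTestSuiteInfo(id=456, project='Fabrikam'),
        source_test_suite=SourceTestSuiteInfo(id=123),
        test_case_ids=[1001, 1002],
    )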
class CloneTestPlanOperationInformation(Model):
"""
Response for Test Plan clone operation
:param clone_operation_response: Various information related to the clone
:type clone_operation_response: :class:`CloneOperationCommonResponse <azure.devops.v7_1.test_plan.models.CloneOperationCommonResponse>`
:param clone_options: Test Plan Clone create parameters
:type clone_options: :class:`CloneOptions <azure.devops.v7_1.test_plan.models.CloneOptions>`
:param destination_test_plan: Information of destination Test Plan
:type destination_test_plan: :class:`TestPlan <azure.devops.v7_1.test_plan.models.TestPlan>`
:param source_test_plan: Information of source Test Plan
:type source_test_plan: :class:`SourceTestplanResponse <azure.devops.v7_1.test_plan.models.SourceTestplanResponse>`
"""
_attribute_map = {
'clone_operation_response': {'key': 'cloneOperationResponse', 'type': 'CloneOperationCommonResponse'},
'clone_options': {'key': 'cloneOptions', 'type': 'CloneOptions'},
'destination_test_plan': {'key': 'destinationTestPlan', 'type': 'TestPlan'},
'source_test_plan': {'key': 'sourceTestPlan', 'type': 'SourceTestplanResponse'}
}
def __init__(self, clone_operation_response=None, clone_options=None, destination_test_plan=None, source_test_plan=None):
super(CloneTestPlanOperationInformation, self).__init__()
self.clone_operation_response = clone_operation_response
self.clone_options = clone_options
self.destination_test_plan = destination_test_plan
self.source_test_plan = source_test_plan
class CloneTestPlanParams(Model):
"""
Parameters for Test Plan clone operation
:param clone_options: Test Plan Clone create parameters
:type clone_options: :class:`CloneOptions <azure.devops.v7_1.test_plan.models.CloneOptions>`
:param destination_test_plan: Information about destination Test Plan
:type destination_test_plan: :class:`DestinationTestPlanCloneParams <azure.devops.v7_1.test_plan.models.DestinationTestPlanCloneParams>`
:param source_test_plan: Information about source Test Plan
:type source_test_plan: :class:`SourceTestPlanInfo <azure.devops.v7_1.test_plan.models.SourceTestPlanInfo>`
"""
_attribute_map = {
'clone_options': {'key': 'cloneOptions', 'type': 'CloneOptions'},
'destination_test_plan': {'key': 'destinationTestPlan', 'type': 'DestinationTestPlanCloneParams'},
'source_test_plan': {'key': 'sourceTestPlan', 'type': 'SourceTestPlanInfo'}
}
def __init__(self, clone_options=None, destination_test_plan=None, source_test_plan=None):
super(CloneTestPlanParams, self).__init__()
self.clone_options = clone_options
self.destination_test_plan = destination_test_plan
self.source_test_plan = source_test_plan
class CloneTestSuiteOperationInformation(Model):
"""
Response for Test Suite clone operation
:param cloned_test_suite: Information of newly cloned Test Suite
:type cloned_test_suite: :class:`TestSuiteReferenceWithProject <azure.devops.v7_1.test_plan.models.TestSuiteReferenceWithProject>`
:param clone_operation_response: Various information related to the clone
:type clone_operation_response: :class:`CloneOperationCommonResponse <azure.devops.v7_1.test_plan.models.CloneOperationCommonResponse>`
:param clone_options: Test Plan Clone create parameters
:type clone_options: :class:`CloneOptions <azure.devops.v7_1.test_plan.models.CloneOptions>`
:param destination_test_suite: Information of destination Test Suite
:type destination_test_suite: :class:`TestSuiteReferenceWithProject <azure.devops.v7_1.test_plan.models.TestSuiteReferenceWithProject>`
:param source_test_suite: Information of source Test Suite
:type source_test_suite: :class:`TestSuiteReferenceWithProject <azure.devops.v7_1.test_plan.models.TestSuiteReferenceWithProject>`
"""
_attribute_map = {
'cloned_test_suite': {'key': 'clonedTestSuite', 'type': 'TestSuiteReferenceWithProject'},
'clone_operation_response': {'key': 'cloneOperationResponse', 'type': 'CloneOperationCommonResponse'},
'clone_options': {'key': 'cloneOptions', 'type': 'CloneOptions'},
'destination_test_suite': {'key': 'destinationTestSuite', 'type': 'TestSuiteReferenceWithProject'},
'source_test_suite': {'key': 'sourceTestSuite', 'type': 'TestSuiteReferenceWithProject'}
}
def __init__(self, cloned_test_suite=None, clone_operation_response=None, clone_options=None, destination_test_suite=None, source_test_suite=None):
super(CloneTestSuiteOperationInformation, self).__init__()
self.cloned_test_suite = cloned_test_suite
self.clone_operation_response = clone_operation_response
self.clone_options = clone_options
self.destination_test_suite = destination_test_suite
self.source_test_suite = source_test_suite
class CloneTestSuiteParams(Model):
"""
Parameters for Test Suite clone operation
:param clone_options: Test Plan Clone create parameters
:type clone_options: :class:`CloneOptions <azure.devops.v7_1.test_plan.models.CloneOptions>`
:param destination_test_suite: Information about destination Test Suite
:type destination_test_suite: :class:`DestinationTestSuiteInfo <azure.devops.v7_1.test_plan.models.DestinationTestSuiteInfo>`
:param source_test_suite: Information about source Test Suite
:type source_test_suite: :class:`SourceTestSuiteInfo <azure.devops.v7_1.test_plan.models.SourceTestSuiteInfo>`
"""
_attribute_map = {
'clone_options': {'key': 'cloneOptions', 'type': 'CloneOptions'},
'destination_test_suite': {'key': 'destinationTestSuite', 'type': 'DestinationTestSuiteInfo'},
'source_test_suite': {'key': 'sourceTestSuite', 'type': 'SourceTestSuiteInfo'}
}
def __init__(self, clone_options=None, destination_test_suite=None, source_test_suite=None):
super(CloneTestSuiteParams, self).__init__()
self.clone_options = clone_options
self.destination_test_suite = destination_test_suite
self.source_test_suite = source_test_suite
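# Hypothetical usage sketch, not part of the generated module: parameters for
# cloning a test suite into a suite of another project. All values are
# illustrative.
def _example_clone_test_suite_params():
    options = CloneOptions(
        clone_requirements=False,
        copy_all_suites=True,
        copy_ancestor_hierarchy=False,
        destination_work_item_type='Test Case',
        related_link_comment='Cloned from suite 123',
    )
    return CloneTestSuiteParams(
        clone_options=options,
        destination_test_suite=DestinationTestSuiteInfo(id=456, project='Fabrikam'),
        source_test_suite=SourceTestSuiteInfo(id=123),
    )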
class Configuration(Model):
"""
Configuration of the Test Point
:param configuration_id: Id of the Configuration Assigned to the Test Point
:type configuration_id: int
"""
_attribute_map = {
'configuration_id': {'key': 'configurationId', 'type': 'int'}
}
def __init__(self, configuration_id=None):
super(Configuration, self).__init__()
self.configuration_id = configuration_id
class DestinationTestSuiteInfo(Model):
"""
Destination Test Suite information for Test Suite clone operation
:param id: Destination Suite Id
:type id: int
:param project: Destination Project Name
:type project: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'project': {'key': 'project', 'type': 'str'}
}
def __init__(self, id=None, project=None):
super(DestinationTestSuiteInfo, self).__init__()
self.id = id
self.project = project
class GraphSubjectBase(Model):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None):
super(GraphSubjectBase, self).__init__()
self._links = _links
self.descriptor = descriptor
self.display_name = display_name
self.url = url
class IdentityRef(GraphSubjectBase):
"""
:param _links: This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional relationships or more detailed information about this graph subject.
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.microsoft._visual_studio._services._web_api.models.ReferenceLinks>`
:param descriptor: The descriptor is the primary way to reference the graph subject while the system is running. This field will uniquely identify the same graph subject across both Accounts and Organizations.
:type descriptor: str
:param display_name: This is the non-unique display name of the graph subject. To change this field, you must alter its value in the source provider.
:type display_name: str
:param url: This url is the full route to the source resource of this graph subject.
:type url: str
:param directory_alias: Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary
:type directory_alias: str
:param id:
:type id: str
:param image_url: Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary
:type image_url: str
:param inactive: Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary
:type inactive: bool
:param is_aad_identity: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType)
:type is_aad_identity: bool
:param is_container: Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType)
:type is_container: bool
:param is_deleted_in_origin:
:type is_deleted_in_origin: bool
:param profile_url: Deprecated - not in use in most preexisting implementations of ToIdentityRef
:type profile_url: str
:param unique_name: Deprecated - use Domain+PrincipalName instead
:type unique_name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'descriptor': {'key': 'descriptor', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'directory_alias': {'key': 'directoryAlias', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'image_url': {'key': 'imageUrl', 'type': 'str'},
'inactive': {'key': 'inactive', 'type': 'bool'},
'is_aad_identity': {'key': 'isAadIdentity', 'type': 'bool'},
'is_container': {'key': 'isContainer', 'type': 'bool'},
'is_deleted_in_origin': {'key': 'isDeletedInOrigin', 'type': 'bool'},
'profile_url': {'key': 'profileUrl', 'type': 'str'},
'unique_name': {'key': 'uniqueName', 'type': 'str'}
}
def __init__(self, _links=None, descriptor=None, display_name=None, url=None, directory_alias=None, id=None, image_url=None, inactive=None, is_aad_identity=None, is_container=None, is_deleted_in_origin=None, profile_url=None, unique_name=None):
super(IdentityRef, self).__init__(_links=_links, descriptor=descriptor, display_name=display_name, url=url)
self.directory_alias = directory_alias
self.id = id
self.image_url = image_url
self.inactive = inactive
self.is_aad_identity = is_aad_identity
self.is_container = is_container
self.is_deleted_in_origin = is_deleted_in_origin
self.profile_url = profile_url
self.unique_name = unique_name
class LastResultDetails(Model):
"""
Last result details of test point.
:param date_completed: Completed date of last result.
:type date_completed: datetime
:param duration: Duration of the last result in milliseconds.
:type duration: long
:param run_by: The user who executed the last result.
:type run_by: :class:`IdentityRef <azure.devops.v7_1.microsoft._team_foundation._test_management._web_api.models.IdentityRef>`
"""
_attribute_map = {
'date_completed': {'key': 'dateCompleted', 'type': 'iso-8601'},
'duration': {'key': 'duration', 'type': 'long'},
'run_by': {'key': 'runBy', 'type': 'IdentityRef'}
}
def __init__(self, date_completed=None, duration=None, run_by=None):
super(LastResultDetails, self).__init__()
self.date_completed = date_completed
self.duration = duration
self.run_by = run_by
class LibraryWorkItemsData(Model):
"""
This data model is used in Work item-based tabs of Test Plans Library.
:param column_options: Specifies the column option field names
:type column_options: list of str
:param continuation_token: Continuation token to fetch next set of elements. Present only when HasMoreElements is true.
:type continuation_token: str
:param exceeded_work_item_query_limit: Boolean indicating if the WIQL query has exceeded the limit of items returned.
:type exceeded_work_item_query_limit: bool
:param has_more_elements: Boolean indicating if there are more elements present than what are being sent.
:type has_more_elements: bool
    :param return_code: Specifies if there was an error during execution of the data provider.
    :type return_code: object
    :param work_item_ids: List of work item IDs returned when OrderByField is set to something other than Id.
:type work_item_ids: list of int
:param work_items: List of work items to be returned.
:type work_items: list of :class:`WorkItemDetails <azure.devops.v7_1.test_plan.models.WorkItemDetails>`
"""
_attribute_map = {
'column_options': {'key': 'columnOptions', 'type': '[str]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'exceeded_work_item_query_limit': {'key': 'exceededWorkItemQueryLimit', 'type': 'bool'},
'has_more_elements': {'key': 'hasMoreElements', 'type': 'bool'},
'return_code': {'key': 'returnCode', 'type': 'object'},
'work_item_ids': {'key': 'workItemIds', 'type': '[int]'},
'work_items': {'key': 'workItems', 'type': '[WorkItemDetails]'}
}
def __init__(self, column_options=None, continuation_token=None, exceeded_work_item_query_limit=None, has_more_elements=None, return_code=None, work_item_ids=None, work_items=None):
super(LibraryWorkItemsData, self).__init__()
self.column_options = column_options
self.continuation_token = continuation_token
self.exceeded_work_item_query_limit = exceeded_work_item_query_limit
self.has_more_elements = has_more_elements
self.return_code = return_code
self.work_item_ids = work_item_ids
self.work_items = work_items
class LibraryWorkItemsDataProviderRequest(Model):
"""
This is the request data contract for LibraryTestCaseDataProvider.
:param column_options: Specifies the list of column options to show in test cases table.
:type column_options: list of str
:param continuation_token: The continuation token required for paging of work items. This is required when getting subsequent sets of work items when OrderByField is Id.
:type continuation_token: str
:param filter_values: List of filter values to be supplied. Currently supported filters are Title, State, AssignedTo, Priority, AreaPath.
:type filter_values: list of :class:`TestPlansLibraryWorkItemFilter <azure.devops.v7_1.test_plan.models.TestPlansLibraryWorkItemFilter>`
:param is_ascending: Whether the data is to be sorted in ascending or descending order. When not supplied, defaults to descending.
:type is_ascending: bool
:param library_query_type: The type of query to run.
:type library_query_type: object
:param order_by_field: Work item field on which to order the results. When not supplied, defaults to work item IDs.
:type order_by_field: str
:param work_item_ids: List of work items to query for field details. This is required when getting subsequent sets of work item fields when OrderByField is other than Id.
:type work_item_ids: list of int
"""
_attribute_map = {
'column_options': {'key': 'columnOptions', 'type': '[str]'},
'continuation_token': {'key': 'continuationToken', 'type': 'str'},
'filter_values': {'key': 'filterValues', 'type': '[TestPlansLibraryWorkItemFilter]'},
'is_ascending': {'key': 'isAscending', 'type': 'bool'},
'library_query_type': {'key': 'libraryQueryType', 'type': 'object'},
'order_by_field': {'key': 'orderByField', 'type': 'str'},
'work_item_ids': {'key': 'workItemIds', 'type': '[int]'}
}
def __init__(self, column_options=None, continuation_token=None, filter_values=None, is_ascending=None, library_query_type=None, order_by_field=None, work_item_ids=None):
super(LibraryWorkItemsDataProviderRequest, self).__init__()
self.column_options = column_options
self.continuation_token = continuation_token
self.filter_values = filter_values
self.is_ascending = is_ascending
self.library_query_type = library_query_type
self.order_by_field = order_by_field
self.work_item_ids = work_item_ids
class NameValuePair(Model):
"""
Name value pair
:param name: Name
:type name: str
:param value: Value
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'}
}
def __init__(self, name=None, value=None):
super(NameValuePair, self).__init__()
self.name = name
self.value = value
class PointAssignment(Configuration):
"""
Assignments for the Test Point
:param configuration_id: Id of the Configuration Assigned to the Test Point
:type configuration_id: int
:param configuration_name: Name of the Configuration Assigned to the Test Point
:type configuration_name: str
:param id: Id of the Test Point
:type id: int
:param tester: Tester Assigned to the Test Point
:type tester: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
"""
_attribute_map = {
'configuration_id': {'key': 'configurationId', 'type': 'int'},
'configuration_name': {'key': 'configurationName', 'type': 'str'},
'id': {'key': 'id', 'type': 'int'},
'tester': {'key': 'tester', 'type': 'IdentityRef'}
}
def __init__(self, configuration_id=None, configuration_name=None, id=None, tester=None):
super(PointAssignment, self).__init__(configuration_id=configuration_id)
self.configuration_name = configuration_name
self.id = id
self.tester = tester
class ReferenceLinks(Model):
"""
The class to represent a collection of REST reference links.
:param links: The readonly view of the links. Because Reference links are readonly, we only want to expose them as read only.
:type links: dict
"""
_attribute_map = {
'links': {'key': 'links', 'type': '{object}'}
}
def __init__(self, links=None):
super(ReferenceLinks, self).__init__()
self.links = links
class ReleaseEnvironmentDefinitionReference(Model):
"""
Reference to release environment resource.
:param definition_id: ID of the release definition that contains the release environment definition.
:type definition_id: int
:param environment_definition_id: ID of the release environment definition.
:type environment_definition_id: int
"""
_attribute_map = {
'definition_id': {'key': 'definitionId', 'type': 'int'},
'environment_definition_id': {'key': 'environmentDefinitionId', 'type': 'int'}
}
def __init__(self, definition_id=None, environment_definition_id=None):
super(ReleaseEnvironmentDefinitionReference, self).__init__()
self.definition_id = definition_id
self.environment_definition_id = environment_definition_id
class Results(Model):
"""
Results class for Test Point
:param outcome: Outcome of the Test Point
:type outcome: object
"""
_attribute_map = {
'outcome': {'key': 'outcome', 'type': 'object'}
}
def __init__(self, outcome=None):
super(Results, self).__init__()
self.outcome = outcome
class SourceTestPlanInfo(Model):
"""
Source Test Plan information for Test Plan clone operation
:param id: ID of the source Test Plan
:type id: int
:param suite_ids: Id of suites to be cloned inside source Test Plan
:type suite_ids: list of int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'suite_ids': {'key': 'suiteIds', 'type': '[int]'}
}
def __init__(self, id=None, suite_ids=None):
super(SourceTestPlanInfo, self).__init__()
self.id = id
self.suite_ids = suite_ids
class SourceTestSuiteInfo(Model):
"""
Source Test Suite information for Test Suite clone operation
:param id: Id of the Source Test Suite
:type id: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'}
}
def __init__(self, id=None):
super(SourceTestSuiteInfo, self).__init__()
self.id = id
class SuiteEntryUpdateParams(Model):
"""
A suite entry defines properties for a test suite.
:param id: Id of the suite entry in the test suite: either a test case id or child suite id.
:type id: int
:param sequence_number: Sequence number for the suite entry object in the test suite.
:type sequence_number: int
:param suite_entry_type: Defines whether the entry is of type test case or suite.
:type suite_entry_type: object
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'sequence_number': {'key': 'sequenceNumber', 'type': 'int'},
'suite_entry_type': {'key': 'suiteEntryType', 'type': 'object'}
}
def __init__(self, id=None, sequence_number=None, suite_entry_type=None):
super(SuiteEntryUpdateParams, self).__init__()
self.id = id
self.sequence_number = sequence_number
self.suite_entry_type = suite_entry_type
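# Hypothetical usage sketch, not part of the generated module: reordering the
# entries of a suite. The suite_entry_type string values ('testCase' / 'suite')
# follow the REST API's SuiteEntryTypes enum and are assumptions here.
def _example_suite_entry_reorder():
    return [
        SuiteEntryUpdateParams(id=1001, sequence_number=0, suite_entry_type='testCase'),
        SuiteEntryUpdateParams(id=42, sequence_number=1, suite_entry_type='suite'),
    ]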
class SuiteTestCaseCreateUpdateParameters(Model):
"""
Create and Update Suite Test Case Parameters
    :param point_assignments: Configuration Ids
:type point_assignments: list of :class:`Configuration <azure.devops.v7_1.test_plan.models.Configuration>`
:param work_item: Id of Test Case to be updated or created
:type work_item: :class:`WorkItem <azure.devops.v7_1.test_plan.models.WorkItem>`
"""
_attribute_map = {
'point_assignments': {'key': 'pointAssignments', 'type': '[Configuration]'},
'work_item': {'key': 'workItem', 'type': 'WorkItem'}
}
def __init__(self, point_assignments=None, work_item=None):
super(SuiteTestCaseCreateUpdateParameters, self).__init__()
self.point_assignments = point_assignments
self.work_item = work_item
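# Hypothetical usage sketch, not part of the generated module: assigning an
# existing test case to two configurations when adding it to a suite. The
# work_item reference is left unset here because the WorkItem model's fields are
# defined elsewhere in this module.
def _example_suite_test_case_params():
    return SuiteTestCaseCreateUpdateParameters(
        point_assignments=[Configuration(configuration_id=2), Configuration(configuration_id=5)],
    )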
class TeamProjectReference(Model):
"""
Represents a shallow reference to a TeamProject.
:param abbreviation: Project abbreviation.
:type abbreviation: str
:param default_team_image_url: Url to default team identity image.
:type default_team_image_url: str
:param description: The project's description (if any).
:type description: str
:param id: Project identifier.
:type id: str
:param last_update_time: Project last update time.
:type last_update_time: datetime
:param name: Project name.
:type name: str
:param revision: Project revision.
:type revision: long
:param state: Project state.
:type state: object
:param url: Url to the full version of the object.
:type url: str
:param visibility: Project visibility.
:type visibility: object
"""
_attribute_map = {
'abbreviation': {'key': 'abbreviation', 'type': 'str'},
'default_team_image_url': {'key': 'defaultTeamImageUrl', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
'name': {'key': 'name', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'long'},
'state': {'key': 'state', 'type': 'object'},
'url': {'key': 'url', 'type': 'str'},
'visibility': {'key': 'visibility', 'type': 'object'}
}
def __init__(self, abbreviation=None, default_team_image_url=None, description=None, id=None, last_update_time=None, name=None, revision=None, state=None, url=None, visibility=None):
super(TeamProjectReference, self).__init__()
self.abbreviation = abbreviation
self.default_team_image_url = default_team_image_url
self.description = description
self.id = id
self.last_update_time = last_update_time
self.name = name
self.revision = revision
self.state = state
self.url = url
self.visibility = visibility
class TestCase(Model):
"""
Test Case Class
:param links: Reference links
:type links: :class:`ReferenceLinks <azure.devops.v7_1.test_plan.models.ReferenceLinks>`
:param order: Order of the TestCase in the Suite
:type order: int
:param point_assignments: List of Points associated with the Test Case
:type point_assignments: list of :class:`PointAssignment <azure.devops.v7_1.test_plan.models.PointAssignment>`
:param project: Project under which the Test Case is
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
:param test_plan: Test Plan under which the Test Case is
:type test_plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param test_suite: Test Suite under which the Test Case is
:type test_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param work_item: Work Item details of the TestCase
:type work_item: :class:`WorkItemDetails <azure.devops.v7_1.test_plan.models.WorkItemDetails>`
"""
_attribute_map = {
'links': {'key': 'links', 'type': 'ReferenceLinks'},
'order': {'key': 'order', 'type': 'int'},
'point_assignments': {'key': 'pointAssignments', 'type': '[PointAssignment]'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'test_plan': {'key': 'testPlan', 'type': 'TestPlanReference'},
'test_suite': {'key': 'testSuite', 'type': 'TestSuiteReference'},
'work_item': {'key': 'workItem', 'type': 'WorkItemDetails'}
}
def __init__(self, links=None, order=None, point_assignments=None, project=None, test_plan=None, test_suite=None, work_item=None):
super(TestCase, self).__init__()
self.links = links
self.order = order
self.point_assignments = point_assignments
self.project = project
self.test_plan = test_plan
self.test_suite = test_suite
self.work_item = work_item
class TestCaseAssociatedResult(Model):
"""
:param completed_date:
:type completed_date: datetime
:param configuration:
:type configuration: :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param outcome:
:type outcome: object
:param plan:
:type plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param point_id:
:type point_id: int
:param result_id:
:type result_id: int
:param run_by:
:type run_by: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param run_id:
:type run_id: int
:param suite:
:type suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param tester:
:type tester: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
"""
_attribute_map = {
'completed_date': {'key': 'completedDate', 'type': 'iso-8601'},
'configuration': {'key': 'configuration', 'type': 'TestConfigurationReference'},
'outcome': {'key': 'outcome', 'type': 'object'},
'plan': {'key': 'plan', 'type': 'TestPlanReference'},
'point_id': {'key': 'pointId', 'type': 'int'},
'result_id': {'key': 'resultId', 'type': 'int'},
'run_by': {'key': 'runBy', 'type': 'IdentityRef'},
'run_id': {'key': 'runId', 'type': 'int'},
'suite': {'key': 'suite', 'type': 'TestSuiteReference'},
'tester': {'key': 'tester', 'type': 'IdentityRef'}
}
def __init__(self, completed_date=None, configuration=None, outcome=None, plan=None, point_id=None, result_id=None, run_by=None, run_id=None, suite=None, tester=None):
super(TestCaseAssociatedResult, self).__init__()
self.completed_date = completed_date
self.configuration = configuration
self.outcome = outcome
self.plan = plan
self.point_id = point_id
self.result_id = result_id
self.run_by = run_by
self.run_id = run_id
self.suite = suite
self.tester = tester
class TestCaseReference(Model):
"""
Test Case Reference
:param assigned_to: Identity to whom the test case is assigned
:type assigned_to: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param id: Test Case Id
:type id: int
:param name: Test Case Name
:type name: str
:param state: State of the test case work item
:type state: str
"""
_attribute_map = {
'assigned_to': {'key': 'assignedTo', 'type': 'IdentityRef'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'}
}
def __init__(self, assigned_to=None, id=None, name=None, state=None):
super(TestCaseReference, self).__init__()
self.assigned_to = assigned_to
self.id = id
self.name = name
self.state = state
class TestCaseResultsData(Model):
"""
This data model is used in TestCaseResultsDataProvider and populates the data required for initial page load
:param context_point: Point information from where the execution history was viewed. Used to set initial filters.
:type context_point: :class:`TestPointDetailedReference <azure.devops.v7_1.test_plan.models.TestPointDetailedReference>`
:param results: Use to store the results displayed in the table
:type results: list of :class:`TestCaseAssociatedResult <azure.devops.v7_1.test_plan.models.TestCaseAssociatedResult>`
:param test_case_name: Test Case Name to be displayed in the table header
:type test_case_name: str
"""
_attribute_map = {
'context_point': {'key': 'contextPoint', 'type': 'TestPointDetailedReference'},
'results': {'key': 'results', 'type': '[TestCaseAssociatedResult]'},
'test_case_name': {'key': 'testCaseName', 'type': 'str'}
}
def __init__(self, context_point=None, results=None, test_case_name=None):
super(TestCaseResultsData, self).__init__()
self.context_point = context_point
self.results = results
self.test_case_name = test_case_name
class TestConfigurationCreateUpdateParameters(Model):
"""
Test Configuration Create or Update Parameters
:param description: Description of the configuration
:type description: str
:param is_default: Is the configuration a default for the test plans
:type is_default: bool
:param name: Name of the configuration
:type name: str
:param state: State of the configuration
:type state: object
:param values: Dictionary of Test Variable, Selected Value
:type values: list of :class:`NameValuePair <azure.devops.v7_1.test_plan.models.NameValuePair>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'state': {'key': 'state', 'type': 'object'},
'values': {'key': 'values', 'type': '[NameValuePair]'}
}
def __init__(self, description=None, is_default=None, name=None, state=None, values=None):
super(TestConfigurationCreateUpdateParameters, self).__init__()
self.description = description
self.is_default = is_default
self.name = name
self.state = state
self.values = values
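# Illustrative sketch: building TestConfigurationCreateUpdateParameters with a browser/OS
# value pair. NameValuePair is defined earlier in this module; the create_test_configuration
# call is assumed from the TestPlanClient and may differ in your SDK version. The `state`
# field is declared as `object` because the REST enum is sent as a string (e.g. 'active').
def _example_create_configuration(client, project):
    params = TestConfigurationCreateUpdateParameters(
        name='Windows 11 + Edge',
        description='Default desktop configuration',
        is_default=True,
        state='active',
        values=[NameValuePair(name='Browser', value='Edge'),
                NameValuePair(name='Operating System', value='Windows 11')])
    return client.create_test_configuration(params, project)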
class TestConfigurationReference(Model):
"""
Test Configuration Reference
:param id: Id of the configuration
:type id: int
:param name: Name of the configuration
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(TestConfigurationReference, self).__init__()
self.id = id
self.name = name
class TestEntityCount(Model):
"""
    Test Entity Count. Used to store test cases count (define tab) and test point count (execute tab).
:param count: Test Entity Count
:type count: int
:param test_plan_id: Test Plan under which the Test Entities are
:type test_plan_id: int
:param test_suite_id: Test Suite under which the Test Entities are
:type test_suite_id: int
:param total_count: Total test entities in the suite without the applied filters
:type total_count: int
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'},
'test_plan_id': {'key': 'testPlanId', 'type': 'int'},
'test_suite_id': {'key': 'testSuiteId', 'type': 'int'},
'total_count': {'key': 'totalCount', 'type': 'int'}
}
def __init__(self, count=None, test_plan_id=None, test_suite_id=None, total_count=None):
super(TestEntityCount, self).__init__()
self.count = count
self.test_plan_id = test_plan_id
self.test_suite_id = test_suite_id
self.total_count = total_count
class TestEnvironment(Model):
"""
Test environment Detail.
:param environment_id: Test Environment Id.
:type environment_id: str
:param environment_name: Test Environment Name.
:type environment_name: str
"""
_attribute_map = {
'environment_id': {'key': 'environmentId', 'type': 'str'},
'environment_name': {'key': 'environmentName', 'type': 'str'}
}
def __init__(self, environment_id=None, environment_name=None):
super(TestEnvironment, self).__init__()
self.environment_id = environment_id
self.environment_name = environment_name
class TestOutcomeSettings(Model):
"""
Test outcome settings
:param sync_outcome_across_suites: Value to configure how test outcomes for the same tests across suites are shown
:type sync_outcome_across_suites: bool
"""
_attribute_map = {
'sync_outcome_across_suites': {'key': 'syncOutcomeAcrossSuites', 'type': 'bool'}
}
def __init__(self, sync_outcome_across_suites=None):
super(TestOutcomeSettings, self).__init__()
self.sync_outcome_across_suites = sync_outcome_across_suites
class TestPlanCreateParams(Model):
"""
The test plan create parameters.
:param area_path: Area of the test plan.
:type area_path: str
:param automated_test_environment:
:type automated_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param automated_test_settings:
:type automated_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param build_definition: The Build Definition that generates a build associated with this test plan.
:type build_definition: :class:`BuildDefinitionReference <azure.devops.v7_1.test_plan.models.BuildDefinitionReference>`
:param build_id: Build to be tested.
:type build_id: int
:param description: Description of the test plan.
:type description: str
:param end_date: End date for the test plan.
:type end_date: datetime
:param iteration: Iteration path of the test plan.
:type iteration: str
:param manual_test_environment:
:type manual_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param manual_test_settings:
:type manual_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param name: Name of the test plan.
:type name: str
:param owner: Owner of the test plan.
:type owner: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param release_environment_definition: Release Environment to be used to deploy the build and run automated tests from this test plan.
:type release_environment_definition: :class:`ReleaseEnvironmentDefinitionReference <azure.devops.v7_1.test_plan.models.ReleaseEnvironmentDefinitionReference>`
:param start_date: Start date for the test plan.
:type start_date: datetime
:param state: State of the test plan.
:type state: str
    :param test_outcome_settings: Value to configure how the same tests across test suites under a test plan should behave
:type test_outcome_settings: :class:`TestOutcomeSettings <azure.devops.v7_1.test_plan.models.TestOutcomeSettings>`
"""
_attribute_map = {
'area_path': {'key': 'areaPath', 'type': 'str'},
'automated_test_environment': {'key': 'automatedTestEnvironment', 'type': 'TestEnvironment'},
'automated_test_settings': {'key': 'automatedTestSettings', 'type': 'TestSettings'},
'build_definition': {'key': 'buildDefinition', 'type': 'BuildDefinitionReference'},
'build_id': {'key': 'buildId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'iteration': {'key': 'iteration', 'type': 'str'},
'manual_test_environment': {'key': 'manualTestEnvironment', 'type': 'TestEnvironment'},
'manual_test_settings': {'key': 'manualTestSettings', 'type': 'TestSettings'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'release_environment_definition': {'key': 'releaseEnvironmentDefinition', 'type': 'ReleaseEnvironmentDefinitionReference'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'str'},
'test_outcome_settings': {'key': 'testOutcomeSettings', 'type': 'TestOutcomeSettings'}
}
def __init__(self, area_path=None, automated_test_environment=None, automated_test_settings=None, build_definition=None, build_id=None, description=None, end_date=None, iteration=None, manual_test_environment=None, manual_test_settings=None, name=None, owner=None, release_environment_definition=None, start_date=None, state=None, test_outcome_settings=None):
super(TestPlanCreateParams, self).__init__()
self.area_path = area_path
self.automated_test_environment = automated_test_environment
self.automated_test_settings = automated_test_settings
self.build_definition = build_definition
self.build_id = build_id
self.description = description
self.end_date = end_date
self.iteration = iteration
self.manual_test_environment = manual_test_environment
self.manual_test_settings = manual_test_settings
self.name = name
self.owner = owner
self.release_environment_definition = release_environment_definition
self.start_date = start_date
self.state = state
self.test_outcome_settings = test_outcome_settings
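# Illustrative sketch: a minimal TestPlanCreateParams that fills only a few descriptive
# fields. The create_test_plan call is assumed from the TestPlanClient that accompanies
# these models; adjust to your connection setup.
def _example_create_test_plan(client, project):
    params = TestPlanCreateParams(
        name='Sprint 42 regression',
        description='Regression pass for sprint 42')
    return client.create_test_plan(params, project)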
class TestPlanReference(Model):
"""
The test plan reference resource.
:param id: ID of the test plan.
:type id: int
:param name: Name of the test plan.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(TestPlanReference, self).__init__()
self.id = id
self.name = name
class TestPlansHubRefreshData(Model):
"""
This data model is used in TestPlansHubRefreshDataProvider and populates the data required for initial page load
:param define_column_option_fields:
:type define_column_option_fields: list of str
:param define_tab_custom_column_field_map:
:type define_tab_custom_column_field_map: dict
:param error_message:
:type error_message: str
:param execute_column_option_fields:
:type execute_column_option_fields: list of str
:param execute_tab_custom_column_field_map:
:type execute_tab_custom_column_field_map: dict
:param is_advanced_extension_enabled:
:type is_advanced_extension_enabled: bool
:param selected_pivot_id:
:type selected_pivot_id: str
:param selected_suite_id:
:type selected_suite_id: int
:param test_case_page_size:
:type test_case_page_size: int
:param test_cases:
:type test_cases: list of :class:`TestCase <azure.devops.v7_1.test_plan.models.TestCase>`
:param test_cases_continuation_token:
:type test_cases_continuation_token: str
:param test_plan:
:type test_plan: :class:`TestPlanDetailedReference <azure.devops.v7_1.test_plan.models.TestPlanDetailedReference>`
:param test_point_page_size:
:type test_point_page_size: int
:param test_points:
:type test_points: list of :class:`TestPoint <azure.devops.v7_1.test_plan.models.TestPoint>`
:param test_points_continuation_token:
:type test_points_continuation_token: str
:param test_suites:
:type test_suites: list of :class:`TestSuite <azure.devops.v7_1.test_plan.models.TestSuite>`
:param test_suites_continuation_token:
:type test_suites_continuation_token: str
"""
_attribute_map = {
'define_column_option_fields': {'key': 'defineColumnOptionFields', 'type': '[str]'},
'define_tab_custom_column_field_map': {'key': 'defineTabCustomColumnFieldMap', 'type': '{str}'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
'execute_column_option_fields': {'key': 'executeColumnOptionFields', 'type': '[str]'},
'execute_tab_custom_column_field_map': {'key': 'executeTabCustomColumnFieldMap', 'type': '{str}'},
'is_advanced_extension_enabled': {'key': 'isAdvancedExtensionEnabled', 'type': 'bool'},
'selected_pivot_id': {'key': 'selectedPivotId', 'type': 'str'},
'selected_suite_id': {'key': 'selectedSuiteId', 'type': 'int'},
'test_case_page_size': {'key': 'testCasePageSize', 'type': 'int'},
'test_cases': {'key': 'testCases', 'type': '[TestCase]'},
'test_cases_continuation_token': {'key': 'testCasesContinuationToken', 'type': 'str'},
'test_plan': {'key': 'testPlan', 'type': 'TestPlanDetailedReference'},
'test_point_page_size': {'key': 'testPointPageSize', 'type': 'int'},
'test_points': {'key': 'testPoints', 'type': '[TestPoint]'},
'test_points_continuation_token': {'key': 'testPointsContinuationToken', 'type': 'str'},
'test_suites': {'key': 'testSuites', 'type': '[TestSuite]'},
'test_suites_continuation_token': {'key': 'testSuitesContinuationToken', 'type': 'str'}
}
def __init__(self, define_column_option_fields=None, define_tab_custom_column_field_map=None, error_message=None, execute_column_option_fields=None, execute_tab_custom_column_field_map=None, is_advanced_extension_enabled=None, selected_pivot_id=None, selected_suite_id=None, test_case_page_size=None, test_cases=None, test_cases_continuation_token=None, test_plan=None, test_point_page_size=None, test_points=None, test_points_continuation_token=None, test_suites=None, test_suites_continuation_token=None):
super(TestPlansHubRefreshData, self).__init__()
self.define_column_option_fields = define_column_option_fields
self.define_tab_custom_column_field_map = define_tab_custom_column_field_map
self.error_message = error_message
self.execute_column_option_fields = execute_column_option_fields
self.execute_tab_custom_column_field_map = execute_tab_custom_column_field_map
self.is_advanced_extension_enabled = is_advanced_extension_enabled
self.selected_pivot_id = selected_pivot_id
self.selected_suite_id = selected_suite_id
self.test_case_page_size = test_case_page_size
self.test_cases = test_cases
self.test_cases_continuation_token = test_cases_continuation_token
self.test_plan = test_plan
self.test_point_page_size = test_point_page_size
self.test_points = test_points
self.test_points_continuation_token = test_points_continuation_token
self.test_suites = test_suites
self.test_suites_continuation_token = test_suites_continuation_token
class TestPlansLibraryWorkItemFilter(Model):
"""
Container to hold information about a filter being applied in Test Plans Library.
:param field_name: Work item field name on which the items are to be filtered.
:type field_name: str
:param field_values: Work item field values corresponding to the field name.
:type field_values: list of str
:param filter_mode: Mode of the filter.
:type filter_mode: object
"""
_attribute_map = {
'field_name': {'key': 'fieldName', 'type': 'str'},
'field_values': {'key': 'fieldValues', 'type': '[str]'},
'filter_mode': {'key': 'filterMode', 'type': 'object'}
}
def __init__(self, field_name=None, field_values=None, filter_mode=None):
super(TestPlansLibraryWorkItemFilter, self).__init__()
self.field_name = field_name
self.field_values = field_values
self.filter_mode = filter_mode
class TestPlanUpdateParams(TestPlanCreateParams):
"""
The test plan update parameters.
:param area_path: Area of the test plan.
:type area_path: str
:param automated_test_environment:
:type automated_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param automated_test_settings:
:type automated_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param build_definition: The Build Definition that generates a build associated with this test plan.
:type build_definition: :class:`BuildDefinitionReference <azure.devops.v7_1.test_plan.models.BuildDefinitionReference>`
:param build_id: Build to be tested.
:type build_id: int
:param description: Description of the test plan.
:type description: str
:param end_date: End date for the test plan.
:type end_date: datetime
:param iteration: Iteration path of the test plan.
:type iteration: str
:param manual_test_environment:
:type manual_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param manual_test_settings:
:type manual_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param name: Name of the test plan.
:type name: str
:param owner: Owner of the test plan.
:type owner: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param release_environment_definition: Release Environment to be used to deploy the build and run automated tests from this test plan.
:type release_environment_definition: :class:`ReleaseEnvironmentDefinitionReference <azure.devops.v7_1.test_plan.models.ReleaseEnvironmentDefinitionReference>`
:param start_date: Start date for the test plan.
:type start_date: datetime
:param state: State of the test plan.
:type state: str
    :param test_outcome_settings: Value to configure how the same tests across test suites under a test plan should behave
:type test_outcome_settings: :class:`TestOutcomeSettings <azure.devops.v7_1.test_plan.models.TestOutcomeSettings>`
:param revision: Revision of the test plan.
:type revision: int
"""
_attribute_map = {
'area_path': {'key': 'areaPath', 'type': 'str'},
'automated_test_environment': {'key': 'automatedTestEnvironment', 'type': 'TestEnvironment'},
'automated_test_settings': {'key': 'automatedTestSettings', 'type': 'TestSettings'},
'build_definition': {'key': 'buildDefinition', 'type': 'BuildDefinitionReference'},
'build_id': {'key': 'buildId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'iteration': {'key': 'iteration', 'type': 'str'},
'manual_test_environment': {'key': 'manualTestEnvironment', 'type': 'TestEnvironment'},
'manual_test_settings': {'key': 'manualTestSettings', 'type': 'TestSettings'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'release_environment_definition': {'key': 'releaseEnvironmentDefinition', 'type': 'ReleaseEnvironmentDefinitionReference'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'str'},
'test_outcome_settings': {'key': 'testOutcomeSettings', 'type': 'TestOutcomeSettings'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, area_path=None, automated_test_environment=None, automated_test_settings=None, build_definition=None, build_id=None, description=None, end_date=None, iteration=None, manual_test_environment=None, manual_test_settings=None, name=None, owner=None, release_environment_definition=None, start_date=None, state=None, test_outcome_settings=None, revision=None):
super(TestPlanUpdateParams, self).__init__(area_path=area_path, automated_test_environment=automated_test_environment, automated_test_settings=automated_test_settings, build_definition=build_definition, build_id=build_id, description=description, end_date=end_date, iteration=iteration, manual_test_environment=manual_test_environment, manual_test_settings=manual_test_settings, name=name, owner=owner, release_environment_definition=release_environment_definition, start_date=start_date, state=state, test_outcome_settings=test_outcome_settings)
self.revision = revision
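# Illustrative sketch: updating an existing plan. TestPlanUpdateParams adds `revision` on top
# of the create parameters; it is typically copied from a previously fetched plan so the
# service can detect concurrent edits. update_test_plan is assumed from the TestPlanClient.
def _example_rename_test_plan(client, project, existing_plan, new_name):
    params = TestPlanUpdateParams(name=new_name, revision=existing_plan.revision)
    return client.update_test_plan(params, project, existing_plan.id)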
class TestPoint(Model):
"""
Test Point Class
:param comment: Comment associated to the Test Point
:type comment: str
:param configuration: Configuration associated with the Test Point
:type configuration: :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param id: Id of the Test Point
:type id: int
    :param is_active: Indicates whether the Test Point is active or not
:type is_active: bool
:param is_automated: Is the Test Point for Automated Test Case or Manual
:type is_automated: bool
:param last_reset_to_active: Last Reset to Active Time Stamp for the Test Point
:type last_reset_to_active: datetime
:param last_updated_by: Last Updated details for the Test Point
:type last_updated_by: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param last_updated_date: Last Update Time Stamp for the Test Point
:type last_updated_date: datetime
:param links: Reference links
:type links: :class:`ReferenceLinks <azure.devops.v7_1.test_plan.models.ReferenceLinks>`
:param project: Project under which the Test Point is
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
:param results: Results associated to the Test Point
:type results: :class:`TestPointResults <azure.devops.v7_1.test_plan.models.TestPointResults>`
:param test_case_reference: Test Case Reference
:type test_case_reference: :class:`TestCaseReference <azure.devops.v7_1.test_plan.models.TestCaseReference>`
:param tester: Tester associated with the Test Point
:type tester: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param test_plan: Test Plan under which the Test Point is
:type test_plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param test_suite: Test Suite under which the Test Point is
:type test_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
"""
_attribute_map = {
'comment': {'key': 'comment', 'type': 'str'},
'configuration': {'key': 'configuration', 'type': 'TestConfigurationReference'},
'id': {'key': 'id', 'type': 'int'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'is_automated': {'key': 'isAutomated', 'type': 'bool'},
'last_reset_to_active': {'key': 'lastResetToActive', 'type': 'iso-8601'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'IdentityRef'},
'last_updated_date': {'key': 'lastUpdatedDate', 'type': 'iso-8601'},
'links': {'key': 'links', 'type': 'ReferenceLinks'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'results': {'key': 'results', 'type': 'TestPointResults'},
'test_case_reference': {'key': 'testCaseReference', 'type': 'TestCaseReference'},
'tester': {'key': 'tester', 'type': 'IdentityRef'},
'test_plan': {'key': 'testPlan', 'type': 'TestPlanReference'},
'test_suite': {'key': 'testSuite', 'type': 'TestSuiteReference'}
}
def __init__(self, comment=None, configuration=None, id=None, is_active=None, is_automated=None, last_reset_to_active=None, last_updated_by=None, last_updated_date=None, links=None, project=None, results=None, test_case_reference=None, tester=None, test_plan=None, test_suite=None):
super(TestPoint, self).__init__()
self.comment = comment
self.configuration = configuration
self.id = id
self.is_active = is_active
self.is_automated = is_automated
self.last_reset_to_active = last_reset_to_active
self.last_updated_by = last_updated_by
self.last_updated_date = last_updated_date
self.links = links
self.project = project
self.results = results
self.test_case_reference = test_case_reference
self.tester = tester
self.test_plan = test_plan
self.test_suite = test_suite
class TestPointDetailedReference(Model):
"""
:param configuration:
:type configuration: :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param plan:
:type plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param point_id:
:type point_id: int
:param suite:
:type suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param tester:
:type tester: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
"""
_attribute_map = {
'configuration': {'key': 'configuration', 'type': 'TestConfigurationReference'},
'plan': {'key': 'plan', 'type': 'TestPlanReference'},
'point_id': {'key': 'pointId', 'type': 'int'},
'suite': {'key': 'suite', 'type': 'TestSuiteReference'},
'tester': {'key': 'tester', 'type': 'IdentityRef'}
}
def __init__(self, configuration=None, plan=None, point_id=None, suite=None, tester=None):
super(TestPointDetailedReference, self).__init__()
self.configuration = configuration
self.plan = plan
self.point_id = point_id
self.suite = suite
self.tester = tester
class TestPointResults(Model):
"""
Test Point Results
:param failure_type: Failure Type for the Test Point
:type failure_type: object
:param last_resolution_state: Last Resolution State Id for the Test Point
:type last_resolution_state: object
:param last_result_details: Last Result Details for the Test Point
:type last_result_details: :class:`LastResultDetails <azure.devops.v7_1.test_plan.models.LastResultDetails>`
:param last_result_id: Last Result Id
:type last_result_id: int
:param last_result_state: Last Result State of the Test Point
:type last_result_state: object
    :param last_run_build_number: Last Run Build Number for the Test Point
:type last_run_build_number: str
:param last_test_run_id: Last Test Run Id for the Test Point
:type last_test_run_id: int
:param outcome: Outcome of the Test Point
:type outcome: object
:param state: State of the Test Point
:type state: object
"""
_attribute_map = {
'failure_type': {'key': 'failureType', 'type': 'object'},
'last_resolution_state': {'key': 'lastResolutionState', 'type': 'object'},
'last_result_details': {'key': 'lastResultDetails', 'type': 'LastResultDetails'},
'last_result_id': {'key': 'lastResultId', 'type': 'int'},
'last_result_state': {'key': 'lastResultState', 'type': 'object'},
'last_run_build_number': {'key': 'lastRunBuildNumber', 'type': 'str'},
'last_test_run_id': {'key': 'lastTestRunId', 'type': 'int'},
'outcome': {'key': 'outcome', 'type': 'object'},
'state': {'key': 'state', 'type': 'object'}
}
def __init__(self, failure_type=None, last_resolution_state=None, last_result_details=None, last_result_id=None, last_result_state=None, last_run_build_number=None, last_test_run_id=None, outcome=None, state=None):
super(TestPointResults, self).__init__()
self.failure_type = failure_type
self.last_resolution_state = last_resolution_state
self.last_result_details = last_result_details
self.last_result_id = last_result_id
self.last_result_state = last_result_state
self.last_run_build_number = last_run_build_number
self.last_test_run_id = last_test_run_id
self.outcome = outcome
self.state = state
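# Illustrative sketch, purely local: tallying last-run outcomes from a list of TestPoint
# objects (for example one page returned by the client). `outcome` is declared as `object`
# above because the REST enum is serialized as a string such as 'passed' or 'failed'.
def _example_summarize_outcomes(test_points):
    counts = {}
    for point in test_points:
        outcome = point.results.outcome if point.results else None
        counts[outcome] = counts.get(outcome, 0) + 1
    return counts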
class TestPointUpdateParams(Model):
"""
Test Point Update Parameters
:param id: Id of Test Point to be updated
:type id: int
:param is_active: Reset the Test Point to Active
:type is_active: bool
:param results: Results of the test point
:type results: :class:`Results <azure.devops.v7_1.test_plan.models.Results>`
:param tester: Tester of the Test Point
:type tester: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'is_active': {'key': 'isActive', 'type': 'bool'},
'results': {'key': 'results', 'type': 'Results'},
'tester': {'key': 'tester', 'type': 'IdentityRef'}
}
def __init__(self, id=None, is_active=None, results=None, tester=None):
super(TestPointUpdateParams, self).__init__()
self.id = id
self.is_active = is_active
self.results = results
self.tester = tester
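# Illustrative sketch: marking a single test point as passed. Results is defined earlier in
# this module and carries the outcome for the point; update_test_points is assumed from the
# TestPlanClient and takes a list of TestPointUpdateParams for the suite.
def _example_mark_point_passed(client, project, plan_id, suite_id, point_id):
    update = TestPointUpdateParams(id=point_id, results=Results(outcome='passed'))
    return client.update_test_points([update], project, plan_id, suite_id)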
class TestSettings(Model):
"""
Represents the test settings of the run. Used to create test settings and fetch test settings
:param area_path: Area path required to create test settings
:type area_path: str
:param description: Description of the test settings. Used in create test settings.
:type description: str
    :param is_public: Indicates if the test settings is public or private. Used in create test settings.
:type is_public: bool
:param machine_roles: Xml string of machine roles. Used in create test settings.
:type machine_roles: str
:param test_settings_content: Test settings content.
:type test_settings_content: str
:param test_settings_id: Test settings id.
:type test_settings_id: int
:param test_settings_name: Test settings name.
:type test_settings_name: str
"""
_attribute_map = {
'area_path': {'key': 'areaPath', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'is_public': {'key': 'isPublic', 'type': 'bool'},
'machine_roles': {'key': 'machineRoles', 'type': 'str'},
'test_settings_content': {'key': 'testSettingsContent', 'type': 'str'},
'test_settings_id': {'key': 'testSettingsId', 'type': 'int'},
'test_settings_name': {'key': 'testSettingsName', 'type': 'str'}
}
def __init__(self, area_path=None, description=None, is_public=None, machine_roles=None, test_settings_content=None, test_settings_id=None, test_settings_name=None):
super(TestSettings, self).__init__()
self.area_path = area_path
self.description = description
self.is_public = is_public
self.machine_roles = machine_roles
self.test_settings_content = test_settings_content
self.test_settings_id = test_settings_id
self.test_settings_name = test_settings_name
class TestSuiteCreateUpdateCommonParams(Model):
"""
Test Suite Create/Update Common Parameters
:param default_configurations: Test suite default configurations.
:type default_configurations: list of :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param default_testers: Test suite default testers.
:type default_testers: list of :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param inherit_default_configurations: Default configuration was inherited or not.
:type inherit_default_configurations: bool
:param name: Name of test suite.
:type name: str
:param parent_suite: Test suite parent shallow reference.
:type parent_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param query_string: Test suite query string, for dynamic suites.
:type query_string: str
"""
_attribute_map = {
'default_configurations': {'key': 'defaultConfigurations', 'type': '[TestConfigurationReference]'},
'default_testers': {'key': 'defaultTesters', 'type': '[IdentityRef]'},
'inherit_default_configurations': {'key': 'inheritDefaultConfigurations', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_suite': {'key': 'parentSuite', 'type': 'TestSuiteReference'},
'query_string': {'key': 'queryString', 'type': 'str'}
}
def __init__(self, default_configurations=None, default_testers=None, inherit_default_configurations=None, name=None, parent_suite=None, query_string=None):
super(TestSuiteCreateUpdateCommonParams, self).__init__()
self.default_configurations = default_configurations
self.default_testers = default_testers
self.inherit_default_configurations = inherit_default_configurations
self.name = name
self.parent_suite = parent_suite
self.query_string = query_string
class TestSuiteReference(Model):
"""
The test suite reference resource.
:param id: ID of the test suite.
:type id: int
:param name: Name of the test suite.
:type name: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, id=None, name=None):
super(TestSuiteReference, self).__init__()
self.id = id
self.name = name
class TestSuiteReferenceWithProject(TestSuiteReference):
"""
Test Suite Reference with Project
:param id: ID of the test suite.
:type id: int
:param name: Name of the test suite.
:type name: str
:param project: Reference of destination Project
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'}
}
def __init__(self, id=None, name=None, project=None):
super(TestSuiteReferenceWithProject, self).__init__(id=id, name=name)
self.project = project
class TestSuiteUpdateParams(TestSuiteCreateUpdateCommonParams):
"""
Test Suite Update Parameters
:param default_configurations: Test suite default configurations.
:type default_configurations: list of :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param default_testers: Test suite default testers.
:type default_testers: list of :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param inherit_default_configurations: Default configuration was inherited or not.
:type inherit_default_configurations: bool
:param name: Name of test suite.
:type name: str
:param parent_suite: Test suite parent shallow reference.
:type parent_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param query_string: Test suite query string, for dynamic suites.
:type query_string: str
:param revision: Test suite revision.
:type revision: int
"""
_attribute_map = {
'default_configurations': {'key': 'defaultConfigurations', 'type': '[TestConfigurationReference]'},
'default_testers': {'key': 'defaultTesters', 'type': '[IdentityRef]'},
'inherit_default_configurations': {'key': 'inheritDefaultConfigurations', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_suite': {'key': 'parentSuite', 'type': 'TestSuiteReference'},
'query_string': {'key': 'queryString', 'type': 'str'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, default_configurations=None, default_testers=None, inherit_default_configurations=None, name=None, parent_suite=None, query_string=None, revision=None):
super(TestSuiteUpdateParams, self).__init__(default_configurations=default_configurations, default_testers=default_testers, inherit_default_configurations=inherit_default_configurations, name=name, parent_suite=parent_suite, query_string=query_string)
self.revision = revision
class TestVariableCreateUpdateParameters(Model):
"""
Test Variable Create or Update Parameters
:param description: Description of the test variable
:type description: str
:param name: Name of the test variable
:type name: str
:param values: List of allowed values
:type values: list of str
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'values': {'key': 'values', 'type': '[str]'}
}
def __init__(self, description=None, name=None, values=None):
super(TestVariableCreateUpdateParameters, self).__init__()
self.description = description
self.name = name
self.values = values
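# Illustrative sketch: defining a test variable whose allowed values can later feed test
# configurations. create_test_variable is assumed from the TestPlanClient; verify the method
# against your installed azure-devops version.
def _example_create_browser_variable(client, project):
    params = TestVariableCreateUpdateParameters(
        name='Browser',
        description='Browsers covered by manual test passes',
        values=['Edge', 'Chrome', 'Firefox'])
    return client.create_test_variable(params, project)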
class WorkItem(Model):
"""
Work Item
:param id: Id of the Work Item
:type id: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'}
}
def __init__(self, id=None):
super(WorkItem, self).__init__()
self.id = id
class WorkItemDetails(Model):
"""
Work Item Class
:param id: Work Item Id
:type id: int
:param name: Work Item Name
:type name: str
:param work_item_fields: Work Item Fields
:type work_item_fields: list of object
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'work_item_fields': {'key': 'workItemFields', 'type': '[object]'}
}
def __init__(self, id=None, name=None, work_item_fields=None):
super(WorkItemDetails, self).__init__()
self.id = id
self.name = name
self.work_item_fields = work_item_fields
class DestinationTestPlanCloneParams(TestPlanCreateParams):
"""
Destination Test Plan create parameters
:param area_path: Area of the test plan.
:type area_path: str
:param automated_test_environment:
:type automated_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param automated_test_settings:
:type automated_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param build_definition: The Build Definition that generates a build associated with this test plan.
:type build_definition: :class:`BuildDefinitionReference <azure.devops.v7_1.test_plan.models.BuildDefinitionReference>`
:param build_id: Build to be tested.
:type build_id: int
:param description: Description of the test plan.
:type description: str
:param end_date: End date for the test plan.
:type end_date: datetime
:param iteration: Iteration path of the test plan.
:type iteration: str
:param manual_test_environment:
:type manual_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param manual_test_settings:
:type manual_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param name: Name of the test plan.
:type name: str
:param owner: Owner of the test plan.
:type owner: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param release_environment_definition: Release Environment to be used to deploy the build and run automated tests from this test plan.
:type release_environment_definition: :class:`ReleaseEnvironmentDefinitionReference <azure.devops.v7_1.test_plan.models.ReleaseEnvironmentDefinitionReference>`
:param start_date: Start date for the test plan.
:type start_date: datetime
:param state: State of the test plan.
:type state: str
    :param test_outcome_settings: Value to configure how the same tests across test suites under a test plan should behave
:type test_outcome_settings: :class:`TestOutcomeSettings <azure.devops.v7_1.test_plan.models.TestOutcomeSettings>`
:param project: Destination Project Name
:type project: str
"""
_attribute_map = {
'area_path': {'key': 'areaPath', 'type': 'str'},
'automated_test_environment': {'key': 'automatedTestEnvironment', 'type': 'TestEnvironment'},
'automated_test_settings': {'key': 'automatedTestSettings', 'type': 'TestSettings'},
'build_definition': {'key': 'buildDefinition', 'type': 'BuildDefinitionReference'},
'build_id': {'key': 'buildId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'iteration': {'key': 'iteration', 'type': 'str'},
'manual_test_environment': {'key': 'manualTestEnvironment', 'type': 'TestEnvironment'},
'manual_test_settings': {'key': 'manualTestSettings', 'type': 'TestSettings'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'release_environment_definition': {'key': 'releaseEnvironmentDefinition', 'type': 'ReleaseEnvironmentDefinitionReference'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'str'},
'test_outcome_settings': {'key': 'testOutcomeSettings', 'type': 'TestOutcomeSettings'},
'project': {'key': 'project', 'type': 'str'}
}
def __init__(self, area_path=None, automated_test_environment=None, automated_test_settings=None, build_definition=None, build_id=None, description=None, end_date=None, iteration=None, manual_test_environment=None, manual_test_settings=None, name=None, owner=None, release_environment_definition=None, start_date=None, state=None, test_outcome_settings=None, project=None):
super(DestinationTestPlanCloneParams, self).__init__(area_path=area_path, automated_test_environment=automated_test_environment, automated_test_settings=automated_test_settings, build_definition=build_definition, build_id=build_id, description=description, end_date=end_date, iteration=iteration, manual_test_environment=manual_test_environment, manual_test_settings=manual_test_settings, name=name, owner=owner, release_environment_definition=release_environment_definition, start_date=start_date, state=state, test_outcome_settings=test_outcome_settings)
self.project = project
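# Illustrative sketch: a destination description for a cross-project plan clone. The object
# is normally embedded in a CloneTestPlanParams (defined earlier in this module) alongside
# the source plan information and clone options before the clone operation is requested;
# that wiring is omitted here.
def _example_clone_destination(target_project_name, plan_name):
    return DestinationTestPlanCloneParams(
        name=plan_name,
        project=target_project_name,
        description='Cloned from the previous release plan')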
class SourceTestplanResponse(TestPlanReference):
"""
Source Test Plan Response for Test Plan clone operation
:param id: ID of the test plan.
:type id: int
:param name: Name of the test plan.
:type name: str
:param project: project reference
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
    :param suite_ids: Ids of the suites to be cloned inside the source Test Plan
:type suite_ids: list of int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'suite_ids': {'key': 'suiteIds', 'type': '[int]'}
}
def __init__(self, id=None, name=None, project=None, suite_ids=None):
super(SourceTestplanResponse, self).__init__(id=id, name=name)
self.project = project
self.suite_ids = suite_ids
class SourceTestSuiteResponse(TestSuiteReference):
"""
Source Test Suite Response for Test Case clone operation
:param id: ID of the test suite.
:type id: int
:param name: Name of the test suite.
:type name: str
:param project: project reference
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
    :param test_case_ids: Ids of the test cases to be cloned inside the source Test Suite
:type test_case_ids: list of int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'test_case_ids': {'key': 'testCaseIds', 'type': '[int]'}
}
def __init__(self, id=None, name=None, project=None, test_case_ids=None):
super(SourceTestSuiteResponse, self).__init__(id=id, name=name)
self.project = project
self.test_case_ids = test_case_ids
class SuiteEntry(SuiteEntryUpdateParams):
"""
A suite entry defines properties for a test suite.
:param id: Id of the suite entry in the test suite: either a test case id or child suite id.
:type id: int
:param sequence_number: Sequence number for the suite entry object in the test suite.
:type sequence_number: int
:param suite_entry_type: Defines whether the entry is of type test case or suite.
:type suite_entry_type: object
:param suite_id: Id for the test suite.
:type suite_id: int
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'sequence_number': {'key': 'sequenceNumber', 'type': 'int'},
'suite_entry_type': {'key': 'suiteEntryType', 'type': 'object'},
'suite_id': {'key': 'suiteId', 'type': 'int'}
}
def __init__(self, id=None, sequence_number=None, suite_entry_type=None, suite_id=None):
super(SuiteEntry, self).__init__(id=id, sequence_number=sequence_number, suite_entry_type=suite_entry_type)
self.suite_id = suite_id
class TestConfiguration(TestConfigurationCreateUpdateParameters):
"""
Test configuration
:param description: Description of the configuration
:type description: str
:param is_default: Is the configuration a default for the test plans
:type is_default: bool
:param name: Name of the configuration
:type name: str
:param state: State of the configuration
:type state: object
:param values: Dictionary of Test Variable, Selected Value
:type values: list of :class:`NameValuePair <azure.devops.v7_1.test_plan.models.NameValuePair>`
:param id: Id of the configuration
:type id: int
    :param project: Project reference for the test configuration
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'state': {'key': 'state', 'type': 'object'},
'values': {'key': 'values', 'type': '[NameValuePair]'},
'id': {'key': 'id', 'type': 'int'},
'project': {'key': 'project', 'type': 'TeamProjectReference'}
}
def __init__(self, description=None, is_default=None, name=None, state=None, values=None, id=None, project=None):
super(TestConfiguration, self).__init__(description=description, is_default=is_default, name=name, state=state, values=values)
self.id = id
self.project = project
class TestPlan(TestPlanUpdateParams):
"""
The test plan resource.
:param area_path: Area of the test plan.
:type area_path: str
:param automated_test_environment:
:type automated_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param automated_test_settings:
:type automated_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param build_definition: The Build Definition that generates a build associated with this test plan.
:type build_definition: :class:`BuildDefinitionReference <azure.devops.v7_1.test_plan.models.BuildDefinitionReference>`
:param build_id: Build to be tested.
:type build_id: int
:param description: Description of the test plan.
:type description: str
:param end_date: End date for the test plan.
:type end_date: datetime
:param iteration: Iteration path of the test plan.
:type iteration: str
:param manual_test_environment:
:type manual_test_environment: :class:`TestEnvironment <azure.devops.v7_1.test_plan.models.TestEnvironment>`
:param manual_test_settings:
:type manual_test_settings: :class:`TestSettings <azure.devops.v7_1.test_plan.models.TestSettings>`
:param name: Name of the test plan.
:type name: str
:param owner: Owner of the test plan.
:type owner: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param release_environment_definition: Release Environment to be used to deploy the build and run automated tests from this test plan.
:type release_environment_definition: :class:`ReleaseEnvironmentDefinitionReference <azure.devops.v7_1.test_plan.models.ReleaseEnvironmentDefinitionReference>`
:param start_date: Start date for the test plan.
:type start_date: datetime
:param state: State of the test plan.
:type state: str
    :param test_outcome_settings: Value to configure how the same tests across test suites under a test plan should behave
:type test_outcome_settings: :class:`TestOutcomeSettings <azure.devops.v7_1.test_plan.models.TestOutcomeSettings>`
:param revision: Revision of the test plan.
:type revision: int
:param _links: Relevant links
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.test_plan.models.ReferenceLinks>`
:param id: ID of the test plan.
:type id: int
:param previous_build_id: Previous build Id associated with the test plan
:type previous_build_id: int
:param project: Project which contains the test plan.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
:param root_suite: Root test suite of the test plan.
:type root_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param updated_by: Identity Reference for the last update of the test plan
:type updated_by: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param updated_date: Updated date of the test plan
:type updated_date: datetime
"""
_attribute_map = {
'area_path': {'key': 'areaPath', 'type': 'str'},
'automated_test_environment': {'key': 'automatedTestEnvironment', 'type': 'TestEnvironment'},
'automated_test_settings': {'key': 'automatedTestSettings', 'type': 'TestSettings'},
'build_definition': {'key': 'buildDefinition', 'type': 'BuildDefinitionReference'},
'build_id': {'key': 'buildId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'iteration': {'key': 'iteration', 'type': 'str'},
'manual_test_environment': {'key': 'manualTestEnvironment', 'type': 'TestEnvironment'},
'manual_test_settings': {'key': 'manualTestSettings', 'type': 'TestSettings'},
'name': {'key': 'name', 'type': 'str'},
'owner': {'key': 'owner', 'type': 'IdentityRef'},
'release_environment_definition': {'key': 'releaseEnvironmentDefinition', 'type': 'ReleaseEnvironmentDefinitionReference'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'str'},
'test_outcome_settings': {'key': 'testOutcomeSettings', 'type': 'TestOutcomeSettings'},
'revision': {'key': 'revision', 'type': 'int'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'previous_build_id': {'key': 'previousBuildId', 'type': 'int'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'root_suite': {'key': 'rootSuite', 'type': 'TestSuiteReference'},
'updated_by': {'key': 'updatedBy', 'type': 'IdentityRef'},
'updated_date': {'key': 'updatedDate', 'type': 'iso-8601'}
}
def __init__(self, area_path=None, automated_test_environment=None, automated_test_settings=None, build_definition=None, build_id=None, description=None, end_date=None, iteration=None, manual_test_environment=None, manual_test_settings=None, name=None, owner=None, release_environment_definition=None, start_date=None, state=None, test_outcome_settings=None, revision=None, _links=None, id=None, previous_build_id=None, project=None, root_suite=None, updated_by=None, updated_date=None):
super(TestPlan, self).__init__(area_path=area_path, automated_test_environment=automated_test_environment, automated_test_settings=automated_test_settings, build_definition=build_definition, build_id=build_id, description=description, end_date=end_date, iteration=iteration, manual_test_environment=manual_test_environment, manual_test_settings=manual_test_settings, name=name, owner=owner, release_environment_definition=release_environment_definition, start_date=start_date, state=state, test_outcome_settings=test_outcome_settings, revision=revision)
self._links = _links
self.id = id
self.previous_build_id = previous_build_id
self.project = project
self.root_suite = root_suite
self.updated_by = updated_by
self.updated_date = updated_date
class TestPlanDetailedReference(TestPlanReference):
"""
    The test plan detailed reference resource. Contains additional work item related information.
:param id: ID of the test plan.
:type id: int
:param name: Name of the test plan.
:type name: str
:param area_path: Area of the test plan.
:type area_path: str
:param end_date: End date for the test plan.
:type end_date: datetime
:param iteration: Iteration path of the test plan.
:type iteration: str
:param root_suite_id: Root Suite Id
:type root_suite_id: int
:param start_date: Start date for the test plan.
:type start_date: datetime
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'},
'area_path': {'key': 'areaPath', 'type': 'str'},
'end_date': {'key': 'endDate', 'type': 'iso-8601'},
'iteration': {'key': 'iteration', 'type': 'str'},
'root_suite_id': {'key': 'rootSuiteId', 'type': 'int'},
'start_date': {'key': 'startDate', 'type': 'iso-8601'}
}
def __init__(self, id=None, name=None, area_path=None, end_date=None, iteration=None, root_suite_id=None, start_date=None):
super(TestPlanDetailedReference, self).__init__(id=id, name=name)
self.area_path = area_path
self.end_date = end_date
self.iteration = iteration
self.root_suite_id = root_suite_id
self.start_date = start_date
class TestSuiteCreateParams(TestSuiteCreateUpdateCommonParams):
"""
Test suite Create Parameters
:param default_configurations: Test suite default configurations.
:type default_configurations: list of :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param default_testers: Test suite default testers.
:type default_testers: list of :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param inherit_default_configurations: Default configuration was inherited or not.
:type inherit_default_configurations: bool
:param name: Name of test suite.
:type name: str
:param parent_suite: Test suite parent shallow reference.
:type parent_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param query_string: Test suite query string, for dynamic suites.
:type query_string: str
:param requirement_id: Test suite requirement id.
:type requirement_id: int
:param suite_type: Test suite type.
:type suite_type: object
"""
_attribute_map = {
'default_configurations': {'key': 'defaultConfigurations', 'type': '[TestConfigurationReference]'},
'default_testers': {'key': 'defaultTesters', 'type': '[IdentityRef]'},
'inherit_default_configurations': {'key': 'inheritDefaultConfigurations', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_suite': {'key': 'parentSuite', 'type': 'TestSuiteReference'},
'query_string': {'key': 'queryString', 'type': 'str'},
'requirement_id': {'key': 'requirementId', 'type': 'int'},
'suite_type': {'key': 'suiteType', 'type': 'object'}
}
def __init__(self, default_configurations=None, default_testers=None, inherit_default_configurations=None, name=None, parent_suite=None, query_string=None, requirement_id=None, suite_type=None):
super(TestSuiteCreateParams, self).__init__(default_configurations=default_configurations, default_testers=default_testers, inherit_default_configurations=inherit_default_configurations, name=name, parent_suite=parent_suite, query_string=query_string)
self.requirement_id = requirement_id
self.suite_type = suite_type
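# Illustrative sketch: creating a child static suite under an existing suite. `suite_type` is
# declared as `object` above because the REST enum is sent as a string (for example
# 'staticTestSuite', 'dynamicTestSuite' or 'requirementTestSuite'); query_string and
# requirement_id only apply to the latter two kinds. create_test_suite is assumed from the
# TestPlanClient.
def _example_create_static_suite(client, project, plan_id, parent_suite_id, name):
    params = TestSuiteCreateParams(
        name=name,
        suite_type='staticTestSuite',
        parent_suite=TestSuiteReference(id=parent_suite_id))
    return client.create_test_suite(params, project, plan_id)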
class TestVariable(TestVariableCreateUpdateParameters):
"""
Test Variable
:param description: Description of the test variable
:type description: str
:param name: Name of the test variable
:type name: str
:param values: List of allowed values
:type values: list of str
:param id: Id of the test variable
:type id: int
    :param project: Project reference for the test variable
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
"""
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'values': {'key': 'values', 'type': '[str]'},
'id': {'key': 'id', 'type': 'int'},
'project': {'key': 'project', 'type': 'TeamProjectReference'}
}
def __init__(self, description=None, name=None, values=None, id=None, project=None):
super(TestVariable, self).__init__(description=description, name=name, values=values)
self.id = id
self.project = project
class TestSuite(TestSuiteCreateParams):
"""
Test suite
:param default_configurations: Test suite default configurations.
:type default_configurations: list of :class:`TestConfigurationReference <azure.devops.v7_1.test_plan.models.TestConfigurationReference>`
:param default_testers: Test suite default testers.
:type default_testers: list of :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param inherit_default_configurations: Default configuration was inherited or not.
:type inherit_default_configurations: bool
:param name: Name of test suite.
:type name: str
:param parent_suite: Test suite parent shallow reference.
:type parent_suite: :class:`TestSuiteReference <azure.devops.v7_1.test_plan.models.TestSuiteReference>`
:param query_string: Test suite query string, for dynamic suites.
:type query_string: str
:param requirement_id: Test suite requirement id.
:type requirement_id: int
:param suite_type: Test suite type.
:type suite_type: object
:param _links: Links: self, testPoints, testCases, parent
:type _links: :class:`ReferenceLinks <azure.devops.v7_1.test_plan.models.ReferenceLinks>`
:param has_children: Boolean value dictating if Child test suites are present
:type has_children: bool
:param children: Child test suites of current test suite.
:type children: list of :class:`TestSuite <azure.devops.v7_1.test_plan.models.TestSuite>`
:param id: Id of test suite.
:type id: int
:param last_error: Last error for test suite.
:type last_error: str
:param last_populated_date: Last populated date.
:type last_populated_date: datetime
:param last_updated_by: IdentityRef of user who has updated test suite recently.
:type last_updated_by: :class:`IdentityRef <azure.devops.v7_1.test_plan.models.IdentityRef>`
:param last_updated_date: Last update date.
:type last_updated_date: datetime
:param plan: Test plan to which the test suite belongs.
:type plan: :class:`TestPlanReference <azure.devops.v7_1.test_plan.models.TestPlanReference>`
:param project: Test suite project shallow reference.
:type project: :class:`TeamProjectReference <azure.devops.v7_1.test_plan.models.TeamProjectReference>`
:param revision: Test suite revision.
:type revision: int
"""
_attribute_map = {
'default_configurations': {'key': 'defaultConfigurations', 'type': '[TestConfigurationReference]'},
'default_testers': {'key': 'defaultTesters', 'type': '[IdentityRef]'},
'inherit_default_configurations': {'key': 'inheritDefaultConfigurations', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'parent_suite': {'key': 'parentSuite', 'type': 'TestSuiteReference'},
'query_string': {'key': 'queryString', 'type': 'str'},
'requirement_id': {'key': 'requirementId', 'type': 'int'},
'suite_type': {'key': 'suiteType', 'type': 'object'},
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'has_children': {'key': 'hasChildren', 'type': 'bool'},
'children': {'key': 'children', 'type': '[TestSuite]'},
'id': {'key': 'id', 'type': 'int'},
'last_error': {'key': 'lastError', 'type': 'str'},
'last_populated_date': {'key': 'lastPopulatedDate', 'type': 'iso-8601'},
'last_updated_by': {'key': 'lastUpdatedBy', 'type': 'IdentityRef'},
'last_updated_date': {'key': 'lastUpdatedDate', 'type': 'iso-8601'},
'plan': {'key': 'plan', 'type': 'TestPlanReference'},
'project': {'key': 'project', 'type': 'TeamProjectReference'},
'revision': {'key': 'revision', 'type': 'int'}
}
def __init__(self, default_configurations=None, default_testers=None, inherit_default_configurations=None, name=None, parent_suite=None, query_string=None, requirement_id=None, suite_type=None, _links=None, has_children=None, children=None, id=None, last_error=None, last_populated_date=None, last_updated_by=None, last_updated_date=None, plan=None, project=None, revision=None):
super(TestSuite, self).__init__(default_configurations=default_configurations, default_testers=default_testers, inherit_default_configurations=inherit_default_configurations, name=name, parent_suite=parent_suite, query_string=query_string, requirement_id=requirement_id, suite_type=suite_type)
self._links = _links
self.has_children = has_children
self.children = children
self.id = id
self.last_error = last_error
self.last_populated_date = last_populated_date
self.last_updated_by = last_updated_by
self.last_updated_date = last_updated_date
self.plan = plan
self.project = project
self.revision = revision
__all__ = [
'BuildDefinitionReference',
'CloneOperationCommonResponse',
'CloneOptions',
'CloneStatistics',
'CloneTestCaseOperationInformation',
'CloneTestCaseOptions',
'CloneTestCaseParams',
'CloneTestPlanOperationInformation',
'CloneTestPlanParams',
'CloneTestSuiteOperationInformation',
'CloneTestSuiteParams',
'Configuration',
'DestinationTestSuiteInfo',
'GraphSubjectBase',
'IdentityRef',
'LastResultDetails',
'LibraryWorkItemsData',
'LibraryWorkItemsDataProviderRequest',
'NameValuePair',
'PointAssignment',
'ReferenceLinks',
'ReleaseEnvironmentDefinitionReference',
'Results',
'SourceTestPlanInfo',
'SourceTestSuiteInfo',
'SuiteEntryUpdateParams',
'SuiteTestCaseCreateUpdateParameters',
'TeamProjectReference',
'TestCase',
'TestCaseAssociatedResult',
'TestCaseReference',
'TestCaseResultsData',
'TestConfigurationCreateUpdateParameters',
'TestConfigurationReference',
'TestEntityCount',
'TestEnvironment',
'TestOutcomeSettings',
'TestPlanCreateParams',
'TestPlanReference',
'TestPlansHubRefreshData',
'TestPlansLibraryWorkItemFilter',
'TestPlanUpdateParams',
'TestPoint',
'TestPointDetailedReference',
'TestPointResults',
'TestPointUpdateParams',
'TestSettings',
'TestSuiteCreateUpdateCommonParams',
'TestSuiteReference',
'TestSuiteReferenceWithProject',
'TestSuiteUpdateParams',
'TestVariableCreateUpdateParameters',
'WorkItem',
'WorkItemDetails',
'DestinationTestPlanCloneParams',
'SourceTestplanResponse',
'SourceTestSuiteResponse',
'SuiteEntry',
'TestConfiguration',
'TestPlan',
'TestPlanDetailedReference',
'TestSuiteCreateParams',
'TestVariable',
'TestSuite',
]
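# A minimal construction sketch for the models defined above; the suite name,
# id and requirement id below are placeholder values for illustration only.
if __name__ == "__main__":
    suite = TestSuite(name="Regression suite", id=42, requirement_id=1001)
    print(suite.name, suite.id, suite.requirement_id)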
azure-devops-python-api/azure-devops/azure/devops/v7_1/test_plan/models.py
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...client import Client
from . import models
class UPackPackagingClient(Client):
"""UPackPackaging
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
super(UPackPackagingClient, self).__init__(base_url, creds)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
resource_area_identifier = 'd397749b-f115-4027-b6dd-77a65dd10d21'
def add_package(self, metadata, feed_id, package_name, package_version, project=None):
"""AddPackage.
[Preview API]
:param :class:`<UPackPackagePushMetadata> <azure.devops.v7_1.upack_packaging.models.UPackPackagePushMetadata>` metadata:
:param str feed_id:
:param str package_name:
:param str package_version:
:param str project: Project ID or project name
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
content = self._serialize.body(metadata, 'UPackPackagePushMetadata')
self._send(http_method='PUT',
location_id='4cdb2ced-0758-4651-8032-010f070dd7e5',
version='7.1-preview.1',
route_values=route_values,
content=content)
def get_package_metadata(self, feed_id, package_name, package_version, project=None, intent=None):
"""GetPackageMetadata.
[Preview API]
:param str feed_id:
:param str package_name:
:param str package_version:
:param str project: Project ID or project name
:param str intent:
:rtype: :class:`<UPackPackageMetadata> <azure.devops.v7_1.upack_packaging.models.UPackPackageMetadata>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
if package_version is not None:
route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str')
query_parameters = {}
if intent is not None:
query_parameters['intent'] = self._serialize.query('intent', intent, 'str')
response = self._send(http_method='GET',
location_id='4cdb2ced-0758-4651-8032-010f070dd7e5',
version='7.1-preview.1',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('UPackPackageMetadata', response)
def get_package_versions_metadata(self, feed_id, package_name, project=None):
"""GetPackageVersionsMetadata.
[Preview API]
:param str feed_id:
:param str package_name:
:param str project: Project ID or project name
:rtype: :class:`<UPackLimitedPackageMetadataListResponse> <azure.devops.v7_1.upack_packaging.models.UPackLimitedPackageMetadataListResponse>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if feed_id is not None:
route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str')
if package_name is not None:
route_values['packageName'] = self._serialize.url('package_name', package_name, 'str')
response = self._send(http_method='GET',
location_id='4cdb2ced-0758-4651-8032-010f070dd7e5',
version='7.1-preview.1',
route_values=route_values)
return self._deserialize('UPackLimitedPackageMetadataListResponse', response)
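# A minimal usage sketch for the client above; the organization URL, personal
# access token, feed, package and project names are placeholder assumptions
# for illustration only.
if __name__ == "__main__":
    from msrest.authentication import BasicAuthentication

    creds = BasicAuthentication('', 'personal-access-token')
    client = UPackPackagingClient(
        base_url='https://pkgs.dev.azure.com/my-organization',
        creds=creds)
    versions = client.get_package_versions_metadata(
        feed_id='my-feed',
        package_name='my-package',
        project='MyProject')
    print(versions)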
azure-devops-python-api/azure-devops/azure/devops/v7_1/upack_packaging/upack_packaging_client.py
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
"name": "Generate Recordings",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "mcr.microsoft.com/devcontainers/python:3.12",
"features": {
"ghcr.io/devcontainers/features/conda:1": {},
"ghcr.io/devcontainers/features/powershell:1": {}
},
"customizations": {
"codespaces": {
"openFiles": []
},
"vscode": {
"extensions": [
"quantum.qsharp-lang-vscode",
"ms-python.python"
]
}
},
"secrets": {
"AZURE_CLIENT_ID": {
"description": "The client ID of your Azure Quantum service principal."
},
"AZURE_CLIENT_SECRET": {
"description": "The client secret of your Azure Quantum service principal."
},
"AZURE_TENANT_ID": {
"description": "The tenant ID of your Azure Quantum service principal."
},
"AZURE_QUANTUM_SUBSCRIPTION_ID": {
"description": "The subscription ID of your Azure Quantum workspace."
},
"AZURE_QUANTUM_WORKSPACE_RG": {
"description": "The resource group of your Azure Quantum workspace."
},
"AZURE_QUANTUM_WORKSPACE_NAME": {
"description": "The name of your Azure Quantum workspace."
},
"AZURE_QUANTUM_WORKSPACE_LOCATION": {
"description": "The location of your Azure Quantum workspace."
}
}
}
azure-quantum-python/.devcontainer/recordings/devcontainer.json
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import json
from json.decoder import JSONDecodeError
import logging
import os
import time
from azure.identity import CredentialUnavailableError
from azure.core.credentials import AccessToken
from azure.quantum._constants import EnvironmentVariables
_LOGGER = logging.getLogger(__name__)
class _TokenFileCredential(object):
"""
Implements a custom TokenCredential to use a local file as
the source for an AzureQuantum token.
It will only use the local file if the AZURE_QUANTUM_TOKEN_FILE
environment variable is set, and references an existing json file
that contains the access_token and expires_on timestamp in milliseconds.
If the environment variable is not set, the file does not exist,
or the token is invalid in any way (expired, for example),
then the credential will throw CredentialUnavailableError,
so that _ChainedTokenCredential can fallback to other methods.
"""
def __init__(self):
self.token_file = os.environ.get(EnvironmentVariables.QUANTUM_TOKEN_FILE)
if self.token_file:
_LOGGER.debug("Using provided token file location: %s", self.token_file)
else:
_LOGGER.debug("No token file location provided for %s environment variable.",
EnvironmentVariables.QUANTUM_TOKEN_FILE)
def get_token(self, *scopes: str, **kwargs) -> AccessToken: # pylint:disable=unused-argument
"""Request an access token for `scopes`.
This method is called automatically by Azure SDK clients.
This method only returns tokens for the https://quantum.microsoft.com/.default scope.
:param str scopes: desired scopes for the access token.
:raises ~azure.identity.CredentialUnavailableError
when failing to get the token.
The exception has a `message` attribute with the error message.
"""
if not self.token_file:
raise CredentialUnavailableError(message="Token file location not set.")
if not os.path.isfile(self.token_file):
raise CredentialUnavailableError(
message=f"Token file at {self.token_file} does not exist.")
try:
token = self._parse_token_file(self.token_file)
except JSONDecodeError as exception:
raise CredentialUnavailableError(
message="Failed to parse token file: Invalid JSON.") from exception
except KeyError as exception:
raise CredentialUnavailableError(
message="Failed to parse token file: Missing expected value: "
+ str(exception)) from exception
except Exception as exception:
raise CredentialUnavailableError(
message="Failed to parse token file: " + str(exception)) from exception
if token.expires_on <= time.time():
raise CredentialUnavailableError(
message=f"Token already expired at {time.asctime(time.gmtime(token.expires_on))}")
return token
def _parse_token_file(self, path) -> AccessToken:
with open(path, mode="r", encoding="utf-8") as file:
data = json.load(file)
# Convert ms to seconds, since python time.time only handles epoch time in seconds
expires_on = int(data["expires_on"]) / 1000
token = AccessToken(data["access_token"], expires_on)
return token
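# A minimal sketch of the token-file format described in the class docstring
# above; the token value and the one-hour lifetime are placeholder assumptions
# for illustration only.
if __name__ == "__main__":
    import tempfile

    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as sample:
        json.dump({
            "access_token": "<access-token>",
            "expires_on": int((time.time() + 3600) * 1000),  # milliseconds since epoch
        }, sample)
    os.environ[EnvironmentVariables.QUANTUM_TOKEN_FILE] = sample.name
    credential = _TokenFileCredential()
    print(credential.get_token("https://quantum.microsoft.com/.default"))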
azure-quantum-python/azure-quantum/azure/quantum/_authentication/_token.py
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
from __future__ import annotations
import re
import os
from re import Match
from typing import (
Optional,
Callable,
Union,
Any
)
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline.policies import AzureKeyCredentialPolicy
from azure.quantum._authentication import _DefaultAzureCredential
from azure.quantum._constants import (
EnvironmentKind,
EnvironmentVariables,
ConnectionConstants,
GUID_REGEX_PATTERN,
)
class WorkspaceConnectionParams:
"""
Internal Azure Quantum Python SDK class to handle logic
for the parameters needed to connect to a Workspace.
"""
RESOURCE_ID_REGEX = re.compile(
fr"""
^
/subscriptions/(?P<subscription_id>{GUID_REGEX_PATTERN})
/resourceGroups/(?P<resource_group>[^\s/]+)
/providers/Microsoft\.Quantum
/Workspaces/(?P<workspace_name>[^\s/]+)
$
""",
re.VERBOSE | re.IGNORECASE)
CONNECTION_STRING_REGEX = re.compile(
fr"""
^
SubscriptionId=(?P<subscription_id>{GUID_REGEX_PATTERN});
ResourceGroupName=(?P<resource_group>[^\s;]+);
WorkspaceName=(?P<workspace_name>[^\s;]+);
ApiKey=(?P<api_key>[^\s;]+);
QuantumEndpoint=(?P<quantum_endpoint>https://(?P<location>[^\s\.]+).quantum(?:-test)?.azure.com/);
""",
re.VERBOSE | re.IGNORECASE)
def __init__(
self,
subscription_id: Optional[str] = None,
resource_group: Optional[str] = None,
workspace_name: Optional[str] = None,
location: Optional[str] = None,
quantum_endpoint: Optional[str] = None,
arm_endpoint: Optional[str] = None,
environment: Union[str, EnvironmentKind, None] = None,
credential: Optional[object] = None,
resource_id: Optional[str] = None,
user_agent: Optional[str] = None,
user_agent_app_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
api_version: Optional[str] = None,
connection_string: Optional[str] = None,
on_new_client_request: Optional[Callable] = None,
):
# fields are used for these properties since
# they have special getters/setters
self._location = None
self._environment = None
self._quantum_endpoint = None
self._arm_endpoint = None
# regular connection properties
self.subscription_id = None
self.resource_group = None
self.workspace_name = None
self.credential = None
self.user_agent = None
self.user_agent_app_id = None
self.client_id = None
self.tenant_id = None
self.api_version = None
# callback to create a new client if needed
# for example, when changing the user agent
self.on_new_client_request = on_new_client_request
# merge the connection parameters passed
# connection_string is set first as it
# should be overridden by other parameters
self.apply_connection_string(connection_string)
self.merge(
api_version=api_version,
arm_endpoint=arm_endpoint,
quantum_endpoint=quantum_endpoint,
client_id=client_id,
credential=credential,
environment=environment,
location=location,
resource_group=resource_group,
subscription_id=subscription_id,
tenant_id=tenant_id,
user_agent=user_agent,
user_agent_app_id=user_agent_app_id,
workspace_name=workspace_name,
)
self.apply_resource_id(resource_id=resource_id)
@property
def location(self):
"""
The Azure location.
On the setter, we normalize the value removing spaces
and converting it to lowercase.
"""
return self._location
@location.setter
def location(self, value: str):
self._location = (value.replace(" ", "").lower()
if isinstance(value, str)
else value)
@property
def environment(self):
"""
The environment kind, such as dogfood, canary or production.
Defaults to EnvironmentKind.PRODUCTION
"""
return self._environment or EnvironmentKind.PRODUCTION
@environment.setter
def environment(self, value: Union[str, EnvironmentKind]):
self._environment = (EnvironmentKind[value.upper()]
if isinstance(value, str)
else value)
@property
def quantum_endpoint(self):
"""
The Azure Quantum data plane endpoint.
Defaults to well-known endpoint based on the environment.
"""
if self._quantum_endpoint:
return self._quantum_endpoint
if not self.location:
raise ValueError("Location not specified")
if self.environment is EnvironmentKind.PRODUCTION:
return ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(self.location)
if self.environment is EnvironmentKind.CANARY:
return ConnectionConstants.GET_QUANTUM_CANARY_ENDPOINT(self.location)
if self.environment is EnvironmentKind.DOGFOOD:
return ConnectionConstants.GET_QUANTUM_DOGFOOD_ENDPOINT(self.location)
raise ValueError(f"Unknown environment `{self.environment}`.")
@quantum_endpoint.setter
def quantum_endpoint(self, value: str):
self._quantum_endpoint = value
@property
def arm_endpoint(self):
"""
The control plane endpoint.
Defaults to well-known arm_endpoint based on the environment.
"""
if self._arm_endpoint:
return self._arm_endpoint
if self.environment is EnvironmentKind.DOGFOOD:
return ConnectionConstants.ARM_DOGFOOD_ENDPOINT
if self.environment in [EnvironmentKind.PRODUCTION,
EnvironmentKind.CANARY]:
return ConnectionConstants.ARM_PRODUCTION_ENDPOINT
raise ValueError(f"Unknown environment `{self.environment}`.")
@arm_endpoint.setter
def arm_endpoint(self, value: str):
self._arm_endpoint = value
@property
def api_key(self):
"""
        The api-key stored in an AzureKeyCredential.
"""
return (self.credential.key
if isinstance(self.credential, AzureKeyCredential)
else None)
@api_key.setter
def api_key(self, value: str):
if value:
self.credential = AzureKeyCredential(value)
self._api_key = value
def __repr__(self):
"""
Print all fields and properties.
"""
info = []
for key in vars(self):
info.append(f" {key}: {self.__dict__[key]}")
cls = type(self)
for key in dir(self):
attr = getattr(cls, key, None)
if attr and isinstance(attr, property) and attr.fget:
info.append(f" {key}: {attr.fget(self)}")
info.sort()
info.insert(0, super().__repr__())
return "\n".join(info)
def apply_resource_id(self, resource_id: str):
"""
        Parses the resource_id and sets the connection
parameters obtained from it.
"""
if resource_id:
match = re.search(
WorkspaceConnectionParams.RESOURCE_ID_REGEX,
resource_id)
if not match:
raise ValueError("Invalid resource id")
self._merge_re_match(match)
def apply_connection_string(self, connection_string: str):
"""
        Parses the connection_string and sets the connection
parameters obtained from it.
"""
if connection_string:
match = re.search(
WorkspaceConnectionParams.CONNECTION_STRING_REGEX,
connection_string)
if not match:
raise ValueError("Invalid connection string")
self._merge_re_match(match)
def merge(
self,
subscription_id: Optional[str] = None,
resource_group: Optional[str] = None,
workspace_name: Optional[str] = None,
location: Optional[str] = None,
quantum_endpoint: Optional[str] = None,
arm_endpoint: Optional[str] = None,
environment: Union[str, EnvironmentKind, None] = None,
credential: Optional[object] = None,
user_agent: Optional[str] = None,
user_agent_app_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
api_version: Optional[str] = None,
api_key: Optional[str] = None,
):
"""
Set all fields/properties with `not None` values
passed in the (named or key-valued) arguments
into this instance.
"""
self._merge(
api_version=api_version,
arm_endpoint=arm_endpoint,
quantum_endpoint=quantum_endpoint,
client_id=client_id,
credential=credential,
environment=environment,
location=location,
resource_group=resource_group,
subscription_id=subscription_id,
tenant_id=tenant_id,
user_agent=user_agent,
user_agent_app_id=user_agent_app_id,
workspace_name=workspace_name,
api_key=api_key,
merge_default_mode=False,
)
return self
def apply_defaults(
self,
subscription_id: Optional[str] = None,
resource_group: Optional[str] = None,
workspace_name: Optional[str] = None,
location: Optional[str] = None,
quantum_endpoint: Optional[str] = None,
arm_endpoint: Optional[str] = None,
environment: Union[str, EnvironmentKind, None] = None,
credential: Optional[object] = None,
user_agent: Optional[str] = None,
user_agent_app_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
api_version: Optional[str] = None,
api_key: Optional[str] = None,
) -> WorkspaceConnectionParams:
"""
Set all fields/properties with `not None` values
passed in the (named or key-valued) arguments
into this instance IF the instance does not have
the corresponding parameter set yet.
"""
self._merge(
api_version=api_version,
arm_endpoint=arm_endpoint,
quantum_endpoint=quantum_endpoint,
client_id=client_id,
credential=credential,
environment=environment,
location=location,
resource_group=resource_group,
subscription_id=subscription_id,
tenant_id=tenant_id,
user_agent=user_agent,
user_agent_app_id=user_agent_app_id,
workspace_name=workspace_name,
api_key=api_key,
merge_default_mode=True,
)
return self
def _merge(
self,
merge_default_mode: bool,
subscription_id: Optional[str] = None,
resource_group: Optional[str] = None,
workspace_name: Optional[str] = None,
location: Optional[str] = None,
quantum_endpoint: Optional[str] = None,
arm_endpoint: Optional[str] = None,
environment: Union[str, EnvironmentKind, None] = None,
credential: Optional[object] = None,
user_agent: Optional[str] = None,
user_agent_app_id: Optional[str] = None,
tenant_id: Optional[str] = None,
client_id: Optional[str] = None,
api_version: Optional[str] = None,
api_key: Optional[str] = None,
):
"""
Set all fields/properties with `not None` values
passed in the kwargs arguments
into this instance.
If merge_default_mode is True, skip setting
the field/property if it already has a value.
"""
def _get_value_or_default(old_value, new_value):
if merge_default_mode and old_value:
return old_value
if new_value:
return new_value
return old_value
self.subscription_id = _get_value_or_default(self.subscription_id, subscription_id)
self.resource_group = _get_value_or_default(self.resource_group, resource_group)
self.workspace_name = _get_value_or_default(self.workspace_name, workspace_name)
self.location = _get_value_or_default(self.location, location)
self.environment = _get_value_or_default(self.environment, environment)
self.credential = _get_value_or_default(self.credential, credential)
self.user_agent = _get_value_or_default(self.user_agent, user_agent)
self.user_agent_app_id = _get_value_or_default(self.user_agent_app_id, user_agent_app_id)
self.client_id = _get_value_or_default(self.client_id, client_id)
self.tenant_id = _get_value_or_default(self.tenant_id, tenant_id)
self.api_version = _get_value_or_default(self.api_version, api_version)
self.api_key = _get_value_or_default(self.api_key, api_key)
# for these properties that have a default value in the getter, we use
# the private field as the old_value
self.quantum_endpoint = _get_value_or_default(self._quantum_endpoint, quantum_endpoint)
self.arm_endpoint = _get_value_or_default(self._arm_endpoint, arm_endpoint)
return self
def _merge_connection_params(
self,
connection_params: WorkspaceConnectionParams,
merge_default_mode: bool = False,
) -> WorkspaceConnectionParams:
"""
Set all fields/properties with `not None` values
from the `connection_params` into this instance.
"""
self._merge(
api_version=connection_params.api_version,
client_id=connection_params.client_id,
credential=connection_params.credential,
environment=connection_params.environment,
location=connection_params.location,
resource_group=connection_params.resource_group,
subscription_id=connection_params.subscription_id,
tenant_id=connection_params.tenant_id,
user_agent=connection_params.user_agent,
user_agent_app_id=connection_params.user_agent_app_id,
workspace_name=connection_params.workspace_name,
merge_default_mode=merge_default_mode,
            # these properties have a default value in the getter,
            # so we use the private field instead
# pylint: disable=protected-access
arm_endpoint=connection_params._arm_endpoint,
quantum_endpoint=connection_params._quantum_endpoint,
)
return self
def get_credential_or_default(self) -> Any:
"""
Get the credential if one was set,
or defaults to a new _DefaultAzureCredential.
"""
return (self.credential
or _DefaultAzureCredential(
subscription_id=self.subscription_id,
arm_endpoint=self.arm_endpoint,
tenant_id=self.tenant_id))
def get_auth_policy(self) -> Any:
"""
Returns a AzureKeyCredentialPolicy if using an AzureKeyCredential.
Defaults to None.
"""
if isinstance(self.credential, AzureKeyCredential):
return AzureKeyCredentialPolicy(self.credential,
ConnectionConstants.QUANTUM_API_KEY_HEADER)
return None
def append_user_agent(self, value: str):
"""
Append a new value to the Workspace's UserAgent and re-initialize the
QuantumClient. The values are appended using a dash.
:param value: UserAgent value to add, e.g. "azure-quantum-<plugin>"
"""
new_user_agent = None
if (
value
and value not in (self.user_agent or "")
):
new_user_agent = (f"{self.user_agent}-{value}"
if self.user_agent else value)
if new_user_agent != self.user_agent:
self.user_agent = new_user_agent
if self.on_new_client_request:
self.on_new_client_request()
def get_full_user_agent(self):
"""
Get the full Azure Quantum Python SDK UserAgent
that is sent to the service via the header.
"""
full_user_agent = self.user_agent
app_id = self.user_agent_app_id
if self.user_agent_app_id:
full_user_agent = (f"{app_id} {full_user_agent}"
if full_user_agent else app_id)
return full_user_agent
def is_complete(self) -> bool:
"""
Returns true if we have all necessary parameters
to connect to the Azure Quantum Workspace.
"""
return (self.location
and self.subscription_id
and self.resource_group
and self.workspace_name
and self.get_credential_or_default())
def assert_complete(self):
"""
Raises ValueError if we don't have all necessary parameters
to connect to the Azure Quantum Workspace.
"""
if not self.is_complete():
raise ValueError(
"""
Azure Quantum workspace not fully specified.
Please specify one of the following:
1) A valid combination of location and resource ID.
2) A valid combination of location, subscription ID,
resource group name, and workspace name.
3) A valid connection string (via Workspace.from_connection_string()).
""")
def default_from_env_vars(self) -> WorkspaceConnectionParams:
"""
Apply default values found in the environment variables
if current parameters are not set.
"""
self.subscription_id = (self.subscription_id
or os.environ.get(EnvironmentVariables.QUANTUM_SUBSCRIPTION_ID)
or os.environ.get(EnvironmentVariables.SUBSCRIPTION_ID))
self.resource_group = (self.resource_group
or os.environ.get(EnvironmentVariables.QUANTUM_RESOURCE_GROUP)
or os.environ.get(EnvironmentVariables.RESOURCE_GROUP))
self.workspace_name = (self.workspace_name
or os.environ.get(EnvironmentVariables.WORKSPACE_NAME))
self.location = (self.location
or os.environ.get(EnvironmentVariables.QUANTUM_LOCATION)
or os.environ.get(EnvironmentVariables.LOCATION))
self.user_agent_app_id = (self.user_agent_app_id
or os.environ.get(EnvironmentVariables.USER_AGENT_APPID))
self.tenant_id = (self.tenant_id
or os.environ.get(EnvironmentVariables.AZURE_TENANT_ID))
self.client_id = (self.client_id
or os.environ.get(EnvironmentVariables.AZURE_CLIENT_ID))
# for these properties we use the private field
# because the getter return default values
self.environment = (self._environment
or os.environ.get(EnvironmentVariables.QUANTUM_ENV))
# only try to use the connection string from env var if
# we really need it
if (not self.location
or not self.subscription_id
or not self.resource_group
or not self.workspace_name
or not self.credential
):
self._merge_connection_params(
connection_params=WorkspaceConnectionParams(
connection_string=os.environ.get(EnvironmentVariables.CONNECTION_STRING)),
merge_default_mode=True)
return self
@classmethod
def from_env_vars(
cls,
) -> WorkspaceConnectionParams:
"""
Initialize the WorkspaceConnectionParams from values found
in the environment variables.
"""
return WorkspaceConnectionParams().default_from_env_vars()
def _merge_re_match(self, re_match: Match[str]):
def get_value(group_name):
return re_match.groupdict().get(group_name)
self.merge(
subscription_id=get_value('subscription_id'),
resource_group=get_value('resource_group'),
workspace_name=get_value('workspace_name'),
location=get_value('location'),
quantum_endpoint=get_value('quantum_endpoint'),
api_key=get_value('api_key'),
arm_endpoint=get_value('arm_endpoint'),
)
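# A minimal sketch of the connection-string format matched by
# CONNECTION_STRING_REGEX above; every value below is a placeholder assumption
# for illustration only.
if __name__ == "__main__":
    example_connection_string = (
        "SubscriptionId=00000000-0000-0000-0000-000000000000;"
        "ResourceGroupName=my-resource-group;"
        "WorkspaceName=my-workspace;"
        "ApiKey=my-api-key;"
        "QuantumEndpoint=https://eastus.quantum.azure.com/;"
    )
    params = WorkspaceConnectionParams(
        connection_string=example_connection_string)
    print(params.subscription_id, params.resource_group, params.workspace_name)
    print(params.location, params.quantum_endpoint)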
azure-quantum-python/azure-quantum/azure/quantum/_workspace_connection_params.py
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
from typing import TYPE_CHECKING, Optional, Union, Protocol, List
from abc import abstractmethod
from azure.quantum._client.models import SessionDetails, SessionStatus, SessionJobFailurePolicy
from azure.quantum.job.workspace_item import WorkspaceItem
from azure.quantum.job import Job
__all__ = ["Session", "SessionHost", "SessionDetails", "SessionStatus", "SessionJobFailurePolicy"]
if TYPE_CHECKING:
from azure.quantum.workspace import Workspace
from azure.quantum.workspace import Target
class Session(WorkspaceItem):
"""Azure Quantum Job Session: a logical grouping of jobs.
:param workspace: Workspace instance to open the session on
:type workspace: Workspace
:param details: Session details model, containing the session id,
name, job_failure_policy, provider_id and target.
Either this parameter should be passed containing all
the session detail values, or the same values should be
passed as individual parameters.
:type details: Optional[SessionDetails]
:param target: The name of the target (or Target object) to open the session on.
:type target: Union[str, Target, None]
:param provider_id: The id of the provider to open the session on.
If not passed, it will be extracted from the target name.
:type provider_id: Optional[str]
    :param id: The id of the session. If not passed, a random UUID will be used.
:type id: Optional[str]
:param name: The name of the session.
If not passed, the name will be `session-{session-id}`.
:type name: Optional[str]
:param job_failure_policy: The policy that determines when a session would fail,
close and not accept further jobs.
:type job_failure_policy: Union[str, SessionJobFailurePolicy, None]
:raises ValueError: if details is passed along individual parameters,
or if required parameters are missing.
"""
def __init__(
self,
workspace: "Workspace",
details: Optional[SessionDetails] = None,
target: Union[str, "Target", None] = None,
provider_id: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
job_failure_policy: Union[str, SessionJobFailurePolicy, None] = None,
**kwargs):
from azure.quantum.target import Target
target_name = target.name if isinstance(target, Target) else target
self._target = target if isinstance(target, Target) else None
if ((details is not None)
and ((isinstance(target, str)) or
(provider_id is not None) or
(id is not None) or
(name is not None) or
(job_failure_policy is not None))):
raise ValueError("""If `details` is passed, you should not pass `target`,
`provider_id`, `id`, `name` or `job_failure_policy`.""")
if (details is None) and (target is None):
            raise ValueError("If `details` is not passed, you should at least pass the `target`.")
if details is None:
import uuid
import re
id = id if id is not None else str(uuid.uuid1())
name = name if name is not None else f"session-{id}"
if provider_id is None:
match = re.match(r"(\w+)\.", target_name)
if match is not None:
provider_id = match.group(1)
details = SessionDetails(id=id,
name=name,
provider_id=provider_id,
target=target_name,
job_failure_policy=job_failure_policy,
**kwargs)
super().__init__(
workspace=workspace,
details=details,
**kwargs
)
@property
def details(self) -> SessionDetails:
"""Get the session details.
:return: The details about the session.
:rtype: SessionDetails
"""
return self._details
@details.setter
def details(self, value: SessionDetails):
"""Set session details.
:param value: The details about the session
:type value: SessionDetails
"""
self._details = value
@property
def target(self) -> "Target":
"""Get the target associated with the session.
:return: The target associated with the session.
:rtype: Target
"""
return self._target
def open(self) -> "Session":
"""Opens a session, effectively creating a new session in the
Azure Quantum service, and allowing it to accept jobs under it.
:return: The session object with updated details after its opening.
:rtype: Session
"""
self.workspace.open_session(self)
return self
def close(self) -> "Session":
"""Closes a session, not allowing further jobs to be submitted under
the session.
:return: The session object with updated details after its closing.
:rtype: Session
"""
self.workspace.close_session(self)
return self
def refresh(self) -> "Session":
"""Fetches the latest session details from the Azure Quantum service.
:return: The session object with updated details.
:rtype: Session
"""
self.workspace.refresh_session(self)
return self
def list_jobs(self) -> List[Job]:
"""Lists all jobs associated with this session.
:return: A list of all jobs associated with this session.
:rtype: typing.List[Job]
"""
return self.workspace.list_session_jobs(session_id=self.id)
def is_in_terminal_state(self) -> bool:
"""Returns True if the session is in one of the possible
        terminal states (Succeeded, Failed and Timed_Out).
:return: True if the session is in one of the terminal states.
:rtype: bool
"""
return (self.details.status == SessionStatus.SUCCEEDED
or self.details.status == SessionStatus.FAILED
or self.details.status == SessionStatus.TIMED_OUT)
def __enter__(self):
"""PEP 343 context manager implementation to use a session in
a `with` block.
This `__enter__` method is a no-op.
"""
return self
def __exit__(self, type, value, traceback):
"""PEP 343 context manager implementation to use a session in
a `with` block.
This `__exit__` attempts to close the session.
:raises Exception: re-raises the exception that was caught
in the `with` block.
"""
self.close()
if isinstance(value, Exception):
raise
class SessionHost(Protocol):
"""A protocol to allow other objects to "host" a session.
For example, a target object can host an open session and
have all jobs that are being submitted through it to be associated
with that session.
    Example (jobs 1 to 3 will be associated with the session "MySession"):
.. highlight:: python
.. code-block::
with target.open_session(name="MySession") as session:
job1 = target.submit(input_data=input_data, job_name="Job 1")
job2 = target.submit(input_data=input_data, job_name="Job 2")
job3 = target.submit(input_data=input_data, job_name="Job 3")
"""
_latest_session: Optional[Session] = None
@property
def latest_session(self) -> Optional[Session]:
"""Get the latest (open) session associated with this object.
:return: The latest session object.
:rtype: typing.Optional[Session]
"""
return self._latest_session
@latest_session.setter
def latest_session(self, session: Optional[Session]):
"""Set the latest session.
:param value: The latest session
:type value: Optional[Session]
"""
self._latest_session = session
def get_latest_session_id(self) -> Optional[str]:
"""Get the latest (open) session id associated with this object.
This id is used to associate jobs to the latest (open) session.
:return: The latest session id.
:rtype: typing.Optional[str]
"""
return self.latest_session.id if self.latest_session else None
@abstractmethod
def _get_azure_workspace(self) -> "Workspace":
raise NotImplementedError
@abstractmethod
def _get_azure_target_id(self) -> str:
raise NotImplementedError
@abstractmethod
def _get_azure_provider_id(self) -> str:
raise NotImplementedError
def open_session(
self,
details: Optional[SessionDetails] = None,
id: Optional[str] = None,
name: Optional[str] = None,
job_failure_policy: Union[str, SessionJobFailurePolicy, None] = None,
**kwargs
) -> Session:
"""Opens a session and associates all future job submissions to that
session until the session is closed (which happens automatically
after exiting a `with` block).
        Example (jobs 1 to 3 will be associated with the session "MySession"):
.. highlight:: python
.. code-block::
with target.open_session(name="MySession") as session:
job1 = target.submit(input_data=input_data, job_name="Job 1")
job2 = target.submit(input_data=input_data, job_name="Job 2")
job3 = target.submit(input_data=input_data, job_name="Job 3")
Note: If the session host (usually a `target` or qiskit `backend`)
already has a session associated with it (in the `latest_session` property),
then this method will first attempt to close that session before opening
a new one.
:param details: Session details model, containing the session id,
name, job_failure_policy, provider_id and target.
Either this parameter should be passed containing all
                        the session detail values, or the same values should be
passed as individual parameters.
        :param id: The id of the session. If not passed, a random UUID will be used.
:type id: Optional[str]
:param name: The name of the session.
If not passed, the name will be `session-{session-id}`.
:type name: Optional[str]
:param job_failure_policy: The policy that determines when a session would fail,
close and not accept further jobs.
:return: The session object with updated details after its opening.
:rtype: Session
"""
if self.latest_session:
self.latest_session.close()
session = Session(details=details,
id=id,
name=name,
job_failure_policy=job_failure_policy,
workspace=self._get_azure_workspace(),
target=self._get_azure_target_id(),
provider_id=self._get_azure_provider_id(),
**kwargs)
self.latest_session = session
return session.open()
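# A minimal lifecycle sketch for the classes above, assuming an existing Azure
# Quantum workspace and an IonQ simulator target; the workspace parameters and
# the target id are placeholder assumptions for illustration only.
if __name__ == "__main__":
    from azure.quantum import Workspace

    workspace = Workspace(
        subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group="my-resource-group",
        name="my-workspace",
        location="eastus")
    target = workspace.get_targets("ionq.simulator")
    with target.open_session(name="MySession") as session:
        print(session.id, session.details.status)
    print(session.refresh().is_in_terminal_state())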
azure-quantum-python/azure-quantum/azure/quantum/job/session.py
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
"""Defines set of targets for interacting with Azure Quantum"""
from .target import Target
from .ionq import IonQ
from .quantinuum import Quantinuum
from .rigetti import Rigetti
from .pasqal import Pasqal
from .microsoft.elements.dft import MicrosoftElementsDft, MicrosoftElementsDftJob
# Default targets to use when there is no target class
# associated with a given target ID
DEFAULT_TARGETS = {
"ionq": IonQ,
"quantinuum": Quantinuum,
"rigetti": Rigetti,
"pasqal": Pasqal
}
__all__ = [
    "Target",
    "IonQ",
    "Quantinuum",
    "Rigetti",
    "Pasqal",
    "MicrosoftElementsDft",
    "MicrosoftElementsDftJob",
    "DEFAULT_TARGETS"
]
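# A minimal sketch of how DEFAULT_TARGETS can be used to resolve a target
# class from a provider id; "ionq" is just an example key.
if __name__ == "__main__":
    target_cls = DEFAULT_TARGETS.get("ionq", Target)
    print(target_cls.__name__)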
azure-quantum-python/azure-quantum/azure/quantum/target/__init__.py
"""Defines targets and helper functions for the Rigetti provider"""
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
__all__ = [
"Readout",
"Result",
]
import json
from typing import Union, Dict, List, TypeVar, cast
from ...job import Job
RawData = Union[int, float, List[float]]
class Result:
"""Downloads the data of a completed Job and extracts the ``Readout`` for each register.
.. highlight:: python
.. code-block::
from azure.quantum.job import Job
from azure.quantum.target.rigetti import Result
job = Job(...) # This job should come from a Rigetti target
job.wait_until_completed()
result = Result(job)
ro_data = result["ro"]
first_shot_data = ro_data[0]
"""
def __init__(self, job: Job) -> None:
"""
Decode the results of a Job with output type of "rigetti.quil-results.v1"
Args:
job (Job): Azure Quantum job
Raises:
RuntimeError: if the job has not completed successfully
"""
if job.details.status != "Succeeded":
            raise RuntimeError(
                "Cannot retrieve results as job execution failed "
                f"(status: {job.details.status}; "
                f"error: {job.details.error_data})"
            )
data = cast(Dict[str, List[List[RawData]]], json.loads(job.download_data(job.details.output_data_uri)))
self.data_per_register: Dict[str, Readout] = {k: create_readout(v) for k, v in data.items()}
def __getitem__(self, register_name: str) -> "Readout":
return self.data_per_register[register_name]
T = TypeVar("T", bound=Union[int, float, complex])
Readout = List[List[T]]
"""Contains the data of a declared "readout" memory region, usually the ``ro`` register.
All data for all shots for a single Readout will have the same datatype T corresponding to the declared Quil data type:
* ``BIT`` | ``OCTET`` | ``INTEGER``: ``int``
* ``REAL``: ``float``
"""
def create_readout(raw_data: List[List[RawData]]) -> Readout:
if isinstance(raw_data[0][0], list):
raw_data = [[complex(entry[0], entry[1]) for entry in shot] for shot in raw_data]
return raw_data
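# A minimal sketch of the decoding performed by create_readout: integer/real
# registers pass through unchanged, while two-element lists are interpreted as
# complex values. The sample shot data below is illustrative only.
if __name__ == "__main__":
    bit_shots = [[0, 1], [1, 1]]                 # two shots of a 2-bit register
    complex_shots = [[[1.0, 0.0]], [[0.0, 1.0]]]
    print(create_readout(bit_shots))             # [[0, 1], [1, 1]]
    print(create_readout(complex_shots))         # [[(1+0j)], [1j]]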
azure-quantum-python/azure-quantum/azure/quantum/target/rigetti/result.py
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##
# Physical resource estimation for Elliptic Curve Cryptography starting from
# logical resource estimates
import argparse
import os
from azure.quantum import Workspace
from azure.quantum.target.microsoft import MicrosoftEstimator, QubitParams, \
QECScheme
import qsharp
# Configure program arguments
parser = argparse.ArgumentParser(
    prog="ecc",
description="Physical resource estimation for Elliptic Curve Cryptography "
"starting from logical resource estimates")
parser.add_argument(
"-k",
"--keysize",
default=256,
help="Key size (256, 384, 521)")
parser.add_argument(
"-r",
"--resource-id",
default=os.environ.get("AZURE_QUANTUM_RESOURCE_ID"),
help="Resource ID of Azure Quantum workspace (must be set, unless set via "
"environment variable AZURE_QUANTUM_RESOURCE_ID)")
parser.add_argument(
"-l",
"--location",
default=os.environ.get("AZURE_QUANTUM_LOCATION"),
help="Location of Azure Quantum workspace (must be set, unless set via "
"environment AZURE_QUANTUM_LOCATION)")
# Parse and validate arguments
args = parser.parse_args()
if not args.resource_id:
parser.error("the following arguments are required: -r/--resource-id")
if not args.location:
parser.error("the following arguments are required: -l/--location")
# define and compile Q# operation
ECCEstimates = qsharp.compile('''
open Microsoft.Quantum.ResourceEstimation;
operation ECCEstimates(keysize: Int) : Unit {
if keysize == 256 {
use qubits = Qubit[2124];
AccountForEstimates([
TCount(7387343750), // 1.72 * 2.0^32
MeasurementCount(118111601) // 1.76 * 2.0^26
], PSSPCLayout(), qubits);
} elif keysize == 384 {
use qubits = Qubit[3151];
AccountForEstimates([
TCount(25941602468), // 1.51 * 2.0^34
MeasurementCount(660351222) // 1.23 * 2.0^29
], PSSPCLayout(), qubits);
} elif keysize == 521 {
use qubits = Qubit[4258];
AccountForEstimates([
TCount(62534723830), // 1.82 * 2.0^35
MeasurementCount(1707249501) // 1.59 * 2.0^30
], PSSPCLayout(), qubits);
} else {
fail $"keysize {keysize} is not supported";
}
}
''')
# connect to Azure Quantum workspace (you can find the information for your
# resource_id and location on the Overview page of your Quantum workspace)
workspace = Workspace(resource_id=args.resource_id, location=args.location)
estimator = MicrosoftEstimator(workspace)
params = estimator.make_params(num_items=4)
params.arguments["keysize"] = int(args.keysize)
# Error budget
params.error_budget = 0.333
# Gate-based (reasonable)
params.items[0].qubit_params.name = QubitParams.GATE_NS_E3
# Gate-based (optimistic)
params.items[1].qubit_params.name = QubitParams.GATE_NS_E4
# Majorana (reasonable)
params.items[2].qubit_params.name = QubitParams.MAJ_NS_E4
params.items[2].qec_scheme.name = QECScheme.FLOQUET_CODE
# Majorana (optimistic)
params.items[3].qubit_params.name = QubitParams.MAJ_NS_E6
params.items[3].qec_scheme.name = QECScheme.FLOQUET_CODE
job = estimator.submit(ECCEstimates, input_params=params)
results = job.get_results()
table = results.summary_data_frame(labels=[
"Gate-based (reasonable)",
"Gate-based (optimistic)",
"Majorana (reasonable)",
"Majorana (optimistic)"
])
print()
print(table[["Physical qubits", "Physical runtime"]])
## Access non-formatted values, e.g.,
# print(results[0]["physicalCounts"]["physicalQubits"])
# print(results[0]["physicalCounts"]["runtime"])
azure-quantum-python/azure-quantum/examples/resource_estimation/ecc.py
##
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
##
import os
import warnings
import pytest
from importlib.metadata import version, PackageNotFoundError
qsharp_installed = False
try:
# change path to the test folder so that it
# can correctly import the .qs files
# when Q# initializes
os.chdir(os.path.dirname(os.path.abspath(__file__)))
print(f'Attempting to import qsharp. Current folder: {os.path.curdir}')
qsharp_version = version("qsharp")
print(f'qsharp v{qsharp_version} was successfully imported!')
import qsharp
qsharp.init(target_profile=qsharp.TargetProfile.Base)
print(f'Base Q# Profile has been successfully initialized.')
qsharp_installed = True
except PackageNotFoundError as ex:
warnings.warn(f"`qsharp` package was not found. Make sure it's installed.", source=ex)
except Exception as ex:
warnings.warn(f"Failed to import `qsharp` with error: {ex}", source=ex)
skip_if_no_qsharp = pytest.mark.skipif(not qsharp_installed, reason="Test requires qsharp and IQ# kernel installed.")
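# A minimal sketch of how the marker defined above is typically applied in a
# test module; the test name and assertion are illustrative assumptions only.
#
#     @skip_if_no_qsharp
#     def test_base_profile_initialized():
#         assert qsharp_installed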
azure-quantum-python/azure-quantum/tests/unit/import_qsharp.py
<jupyter_start><jupyter_text>👋🌍 Hello, world: Submit a Q# job to RigettiIn this notebook, we'll review the basics of Azure Quantum by submitting a simple *job*, or quantum program, to [Rigetti](https://www.rigetti.com/). We will use [Q#](https://learn.microsoft.com/azure/quantum/user-guide/) to express the quantum job. Submit a simple job to Rigetti using Azure QuantumAzure Quantum provides several ways to express quantum programs. In this example we are using Q#, but note that Qiskit is also supported. All code in this example will be written in Python and Q#. Let's begin. When you see a code block, hover over it and click the triangle play-button to execute it. To avoid any compilation issues, this should be done in order from top to bottom. 1. Connect to the Azure Quantum workspaceTo connect to the Azure Quantum service, initialize the `Workspace` as seen below.<jupyter_code>from azure.quantum import Workspace
workspace = Workspace (
resource_id = "",
location = ""
)<jupyter_output><empty_output><jupyter_text>We can use the resulting object to see which _targets_ are available for submission.<jupyter_code>print("This workspace's targets:")
for target in workspace.get_targets():
    print("-", target.name)<jupyter_output><empty_output><jupyter_text>❕ Do you see `rigetti.sim.qvm` in your list of targets? If so, you're ready to keep going. Don't see it? You may need to add Rigetti to your workspace to run this sample. Navigate to the **Providers** page in the portal and click **+Add** to add the Rigetti provider. Rigetti: The quantum providerAzure Quantum partners with third-party companies to deliver solutions to quantum jobs. These company offerings are called *providers*. Each provider can offer multiple *targets* with different capabilities. See the table below for Rigetti's targets.| Target name | Target ID | Number of qubits | Description || --- | --- | --- | --- || Rigetti QVM (simulator) | `rigetti.sim.qvm` | 20 qubits | Rigetti's cloud-based, [open-source](https://github.com/quil-lang/qvm) "Quantum Virtual Machine" simulator. Free to use. || Ankaa-2 (hardware) | `rigetti.qpu.ankaa-2` | 84 qubits | A 4th-generation, square-lattice processor. Pricing based on QPU usage. |For this example, we will use `rigetti.sim.qvm`. To learn more about Rigetti's targets, check out [Rigetti's Azure Quantum documentation](https://learn.microsoft.com/azure/quantum/provider-rigetti). 2. Build the quantum programLet's create a simple Q# program to run. First, let's initialize the Q# environment and set the target profile to Base Profile. Today, Azure Quantum targets only support the Base Profile, a subset of all Q# commands.<jupyter_code>import qsharp
qsharp.init(target_profile=qsharp.TargetProfile.Base)
%%qsharp
open Microsoft.Quantum.Measurement;
open Microsoft.Quantum.Arrays;
open Microsoft.Quantum.Convert;
operation GenerateRandomBit() : Result {
use target = Qubit();
// Apply an H-gate and measure.
H(target);
return M(target);
}
# Compile the qsharp operation
operation = qsharp.compile("GenerateRandomBit()")<jupyter_output><empty_output><jupyter_text>The program you built is a simple quantum random bit generator. With Rigetti's simulator, we will be able to estimate the probability of measuring a `1` or `0`. 3. Submit the quantum program to RigettiWe will use the `target.submit` function to run the quantum program above on Rigetti's `rigetti.sim.qvm` target. This may take a minute or so ⏳. Your job will be packaged and sent to Rigetti, where it will wait its turn to be run.<jupyter_code># Set the target to rigetti.sim.qvm
target = workspace.get_targets("rigetti.sim.qvm")
# Execute the job. We'll use 100 shots (simulated runs).
job = target.submit(operation, "Generate one random bit", shots=100)
print("Job Id:" + job.id)
result = job.get_results()<jupyter_output><empty_output><jupyter_text>The job ID can be used to retrieve the results later using the [get_job method](https://learn.microsoft.com/python/azure-quantum/azure.quantum.workspace?azure-quantum-workspace-get-job) or by viewing it under the **Job management** section of the portal. 4. Visualize job results You can also view a histogram of the results using [`pyplot`](https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.html):<jupyter_code>from matplotlib import pyplot
pyplot.bar(result.keys(), result.values())
pyplot.title("Result")
pyplot.xlabel("Measurement")
pyplot.ylabel("Probability")
pyplot.show()<jupyter_output><empty_output>
azure-quantum-python/samples/hello-world/HW-rigetti-qsharp.ipynb
/*------------------------------------
Copyright (c) Microsoft Corporation.
Licensed under the MIT License.
All rights reserved.
------------------------------------ */
import * as d3 from "d3";
import * as d3Format from "d3-format";
export type LegendData = {
title: string;
legendTitle: string;
value: number;
};
export interface TextStyle {
fontFamily: string;
fontStyle: string;
fontWeight: string;
fontSize: string;
lineHeight: string;
display: string;
alignItems: string | null | undefined;
textAlign: string | null | undefined;
color: string;
textAnchor: string | null | undefined;
}
export function drawEllipses(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
cx: number,
cy: number,
spaceBetween: number,
radius: number,
fillColor: string,
) {
svg
.append("circle")
.attr("cx", cx)
.attr("cy", cy)
.attr("fill", fillColor)
.attr("r", radius);
svg
.append("circle")
.attr("cx", cx + spaceBetween)
.attr("cy", cy)
.attr("fill", fillColor)
.attr("r", radius);
svg
.append("circle")
.attr("cx", cx + spaceBetween * 2)
.attr("cy", cy)
.attr("fill", fillColor)
.attr("r", radius);
}
export function drawLine(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
linePoints: number[][],
id: string,
strokeWidth: string,
markerStart: string,
markerEnd: string,
fillColor: string,
strokeColor: string,
isDashed: boolean,
) {
// Create line generator
const lineGenerator = d3.line();
if (isDashed) {
svg
.append("path")
.attr("id", id)
.attr("d", lineGenerator(linePoints as any))
.attr("stroke-width", strokeWidth)
.attr("marker-start", markerStart)
.attr("marker-end", markerEnd)
.style("fill", fillColor)
.style("stroke-dasharray", "3,3")
.style("stroke", strokeColor);
} else {
svg
.append("path")
.attr("id", id)
.attr("d", lineGenerator(linePoints as any))
.attr("stroke-width", strokeWidth)
.attr("marker-start", markerStart)
.attr("marker-end", markerEnd)
.style("fill", fillColor)
.style("stroke", strokeColor)
.lower();
}
}
export function drawLegend(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
legendData: LegendData[],
midpoint: number,
chartBottomY: number,
chartStartX: number,
legendColor: d3.ScaleOrdinal<string, unknown, never>,
showLegendValues: boolean,
useTitleForColor: boolean,
) {
const legend = svg
.selectAll(".legend")
.data(legendData)
.enter()
.append("g")
.attr(
"transform",
(d, i) => `translate(${midpoint * i + chartStartX}, ${chartBottomY})`,
);
if (useTitleForColor) {
legend
.append("rect")
.attr("width", 20)
.attr("height", 20)
.attr("x", 0)
.attr("y", 50)
.style("fill", (d) => legendColor(d.title) as string);
} else {
legend
.append("rect")
.attr("width", 20)
.attr("height", 20)
.attr("x", 0)
.attr("y", 50)
.style("fill", (d) => legendColor(d.legendTitle) as string);
}
legend
.append("text")
.attr("x", 25)
.attr("y", 60)
.text((d) => `${d.title}`)
.style("font-size", "14px")
.style("font-family", "Segoe UI")
.style("line-height", "18px")
.style("fill", "black")
.style("font-weight", "600")
.style("font-style", "normal");
legend
.append("text")
.attr("x", 25)
.attr("y", 75)
.text((d) => `${d.legendTitle}`)
.style("font-size", "14px")
.style("font-family", "Segoe UI")
.style("line-height", "18px")
.style("fill", "black")
.style("font-weight", "400")
.style("font-style", "normal");
if (showLegendValues) {
legend
.append("text")
.attr("x", 25)
.attr("y", 100)
.text((d) => `${d3Format.format(",.0f")(d.value)}`)
.style("font-size", "28px")
.style("font-family", "Segoe UI")
.style("line-height", "34px")
.style("fill", "#24272b")
.style("font-style", "normal")
.style("font-weight", "600");
}
}
export function drawText(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
text: string,
x: number,
y: number,
styles: TextStyle,
) {
const alignItems = styles.alignItems ? styles.alignItems : "";
const textAlign = styles.textAlign ? styles.textAlign : "";
const textAnchor = styles.textAnchor ? styles.textAnchor : "";
svg
.append("text")
.attr("x", x)
.attr("y", y)
.text(text)
.raise()
.style("font-family", styles.fontFamily)
.style("font-style", styles.fontStyle)
.style("font-weight", styles.fontWeight)
.style("font-size", styles.fontSize)
.style("line-height", styles.lineHeight)
.style("display", styles.display)
.style("align-items", alignItems)
.style("text-align", textAlign)
.style("fill", styles.color)
.style("text-anchor", textAnchor);
}
export function drawArrow(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
color: string,
id: string,
) {
const markerDim = 3;
const refX = 2;
const refY = 1.5;
const arrowPoints = [
[0, 0],
[0, markerDim],
[markerDim, refY],
];
svg
.append("marker")
.attr("id", id)
.attr("refX", refX)
.attr("refY", refY)
.attr("markerWidth", markerDim)
.attr("markerHeight", markerDim)
.style("fill", color)
.attr("orient", "auto-start-reverse")
.append("path")
.attr("d", d3.line()(arrowPoints as any));
}
export function drawLineTick(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
width: number,
height: number,
color: string,
id: string,
) {
const refX = 1;
const refY = height / 2;
const markerPoints = [
[0, 0],
[0, height],
[width, height],
[width, 0],
];
svg
.append("defs")
.append("marker")
.attr("id", id)
.attr("refX", refX)
.attr("refY", refY)
.attr("markerHeight", height)
.attr("markerWidth", width)
.style("fill", color)
.style("stroke", color)
.attr("orient", "auto-start-reverse")
.append("path")
.attr("d", d3.line()(markerPoints as any));
}
export function drawCircleMarkers(
svg: d3.Selection<d3.BaseType, unknown, HTMLElement, any>,
width: number,
height: number,
color: string,
radius: number,
refX: number,
refY: number,
cx: number,
cy: number,
id: string,
) {
svg
.append("defs")
.append("marker")
.attr("id", id)
.attr("refX", refX)
.attr("refY", refY)
.attr("markerWidth", width)
.attr("markerHeight", height)
.append("circle")
.attr("cx", cx)
.attr("cy", cy)
.attr("r", radius)
.style("fill", color);
}
|
azure-quantum-python/visualization/react-lib/src/components/d3-visualization-components/D3HelperFunctions.ts/0
|
{
"file_path": "azure-quantum-python/visualization/react-lib/src/components/d3-visualization-components/D3HelperFunctions.ts",
"repo_id": "azure-quantum-python",
"token_count": 2826
}
| 381 |
export * from "./components";
|
azure-quantum-python/visualization/react-lib/src/index.ts/0
|
{
"file_path": "azure-quantum-python/visualization/react-lib/src/index.ts",
"repo_id": "azure-quantum-python",
"token_count": 9
}
| 382 |
Tokenizer
=========
.. js:autoclass:: Tokenizer
:members:
.. js:autoclass:: RegExpTokenizer
:members:
.. js:autoclass:: SplittingTokenizer
:members:
|
bistring/docs/JavaScript/Tokenizer.rst/0
|
{
"file_path": "bistring/docs/JavaScript/Tokenizer.rst",
"repo_id": "bistring",
"token_count": 64
}
| 383 |
/*!
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
import { Alignment } from "..";
test("Empty Alignment", () => {
expect(() => new Alignment([])).toThrow();
const a = Alignment.identity(0);
expect(a.values).toEqual([[0, 0]]);
expect(a.originalBounds()).toEqual([0, 0]);
expect(a.modifiedBounds()).toEqual([0, 0]);
expect(a.originalBounds(0, 0)).toEqual([0, 0]);
expect(a.modifiedBounds(0, 0)).toEqual([0, 0]);
});
test("Alignment.identity()", () => {
const a = Alignment.identity(1, 16);
const values = [];
for (let i = 1; i <= 16; ++i) {
values.push([i, i]);
}
expect(a.values).toEqual(values);
expect(a.originalBounds()).toEqual([1, 16]);
expect(a.modifiedBounds()).toEqual([1, 16]);
expect(a.originalBounds(4, 7)).toEqual([4, 7]);
expect(a.modifiedBounds(4, 7)).toEqual([4, 7]);
});
test("Alignment", () => {
const a = new Alignment([[0, 0], [1, 2], [2, 4], [3, 6]]);
expect(a.originalBounds()).toEqual([0, 3]);
expect(a.modifiedBounds()).toEqual([0, 6]);
expect(a.originalBounds(0, 0)).toEqual([0, 0]);
expect(a.originalBounds(0, 1)).toEqual([0, 1]);
expect(a.originalBounds(0, 2)).toEqual([0, 1]);
expect(a.originalBounds(0, 3)).toEqual([0, 2]);
expect(a.originalBounds(1, 1)).toEqual([0, 1]);
expect(a.originalBounds(1, 3)).toEqual([0, 2]);
expect(a.originalBounds(1, 4)).toEqual([0, 2]);
expect(a.originalBounds(2, 2)).toEqual([1, 1]);
expect(a.originalBounds(2, 4)).toEqual([1, 2]);
expect(a.originalBounds(2, 5)).toEqual([1, 3]);
expect(a.originalBounds(3, 3)).toEqual([1, 2]);
expect(a.modifiedBounds(0, 0)).toEqual([0, 0]);
expect(a.modifiedBounds(0, 1)).toEqual([0, 2]);
expect(a.modifiedBounds(0, 2)).toEqual([0, 4]);
expect(a.modifiedBounds(0, 3)).toEqual([0, 6]);
expect(a.modifiedBounds(1, 1)).toEqual([2, 2]);
expect(a.modifiedBounds(2, 2)).toEqual([4, 4]);
});
test("Alignment canonicalization", () => {
let a = new Alignment([[0, 0], [1, 2], [1, 2], [2, 4]]);
expect(a.values).toEqual([[0, 0], [1, 2], [2, 4]]);
a = new Alignment([[0, 0], [1, 2]])
.concat(new Alignment([[1, 2], [2, 4]]));
expect(a.values).toEqual([[0, 0], [1, 2], [2, 4]]);
});
function test_composition(first: Alignment, second: Alignment) {
const composed = first.compose(second);
const [of, ol] = composed.originalBounds();
const [mf, ml] = composed.modifiedBounds();
expect([of, ol]).toEqual(first.originalBounds());
expect([mf, ml]).toEqual(second.modifiedBounds());
for (let i = of; i <= ol; ++i) {
for (let j = i; j <= ol; ++j) {
expect(composed.modifiedBounds(i, j))
.toEqual(second.modifiedBounds(first.modifiedBounds(i, j)));
}
}
for (let i = mf; i <= ml; ++i) {
for (let j = i; j <= ml; ++j) {
expect(composed.originalBounds(i, j))
.toEqual(first.originalBounds(second.originalBounds(i, j)));
}
}
}
test("Alignment.compose", () => {
const first = new Alignment([
[0, 0],
[1, 2],
[2, 4],
[3, 6],
]);
const second = new Alignment([
[0, 0],
[1, 2],
[2, 4],
[3, 6],
[4, 8],
[5, 10],
[6, 11],
]);
test_composition(first, second);
});
function test_identity_composition(alignment: Alignment) {
test_composition(alignment, Alignment.identity(alignment.modifiedBounds()));
test_composition(Alignment.identity(alignment.originalBounds()), alignment);
}
test("Alignment.compose(Alignment.identity)", () => {
const a = new Alignment([
[0, 2],
[2, 2],
[4, 4],
[6, 6],
[8, 6],
]);
// Modified sequence is smaller
test_identity_composition(a);
// Original sequence is smaller
test_identity_composition(a.inverse());
});
test("Alignment.infer", () => {
let a = Alignment.infer("test", "test");
let b = Alignment.identity(4);
expect(a.equals(b)).toBe(true);
a = Alignment.infer("asdf", "jkl;");
expect(a.equals(b)).toBe(true);
a = Alignment.infer("color", "colour");
b = new Alignment([
[0, 0],
[1, 1],
[2, 2],
[3, 3],
[4, 4],
[4, 5],
[5, 6],
]);
expect(a.equals(b)).toBe(true);
a = Alignment.infer("ab---", "ab");
b = new Alignment([
[0, 0],
[1, 1],
[2, 2],
[3, 2],
[4, 2],
[5, 2],
]);
expect(a.equals(b)).toBe(true);
});
|
bistring/js/tests/alignment.test.ts/0
|
{
"file_path": "bistring/js/tests/alignment.test.ts",
"repo_id": "bistring",
"token_count": 2181
}
| 384 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
import re
from typing import Match, Pattern
from ._typing import Regex, Replacement
def compile_regex(regex: Regex) -> Pattern[str]:
if isinstance(regex, str):
return re.compile(regex)
else:
return regex
def expand_template(match: Match[str], repl: Replacement) -> str:
if callable(repl):
return repl(match)
else:
return match.expand(repl)
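# Usage sketch (illustrative only, not part of the module): exercises the two
# helpers above with the standard-library `re` module. The sample pattern and
# replacements are made up for demonstration.
if __name__ == "__main__":
    pattern = compile_regex(r"(?P<word>\w+)")  # str -> compiled Pattern
    same = compile_regex(pattern)              # already compiled -> returned as-is
    match = same.search("hello world")
    assert match is not None
    # String templates go through Match.expand(); callables receive the Match.
    assert expand_template(match, r"<\g<word>>") == "<hello>"
    assert expand_template(match, lambda m: m.group(0).upper()) == "HELLO"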
|
bistring/python/bistring/_regex.py/0
|
{
"file_path": "bistring/python/bistring/_regex.py",
"repo_id": "bistring",
"token_count": 173
}
| 385 |
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"groupName": {
"type": "string",
"metadata": {
"description": "Specifies the name of the Resource Group."
}
},
"groupLocation": {
"type": "string",
"metadata": {
"description": "Specifies the location of the Resource Group."
}
},
"appServiceName": {
"type": "string",
"metadata": {
"description": "The globally unique name of the Web App."
}
},
"appServicePlanName": {
"type": "string",
"metadata": {
"description": "The name of the App Service Plan."
}
},
"appServicePlanLocation": {
"type": "string",
"metadata": {
"description": "The location of the App Service Plan."
}
},
"appServicePlanSku": {
"type": "object",
"defaultValue": {
"name": "S1",
"tier": "Standard",
"size": "S1",
"family": "S",
"capacity": 1
},
"metadata": {
"description": "The SKU of the App Service Plan. Defaults to Standard values."
}
},
"appId": {
"type": "string",
"metadata": {
"description": "Active Directory App ID or User-Assigned Managed Identity Client ID, set as MicrosoftAppId in the Web App's Application Settings."
}
},
"appSecret": {
"type": "string",
"metadata": {
"description": "Active Directory App Password, set as MicrosoftAppPassword in the Web App's Application Settings. Required for MultiTenant and SingleTenant app types."
}
}
},
"variables": {
"appServicePlanName": "[parameters('appServicePlanName')]",
"resourcesLocation": "[if(empty(parameters('appServicePlanLocation')), parameters('groupLocation'), parameters('appServicePlanLocation'))]",
"appServiceName": "[parameters('appServiceName')]",
"resourceGroupId": "[concat(subscription().id, '/resourceGroups/', parameters('groupName'))]"
},
"resources": [
{
"name": "[parameters('groupName')]",
"type": "Microsoft.Resources/resourceGroups",
"apiVersion": "2018-05-01",
"location": "[parameters('groupLocation')]",
"properties": {}
},
{
"type": "Microsoft.Resources/deployments",
"apiVersion": "2018-05-01",
"name": "storageDeployment",
"resourceGroup": "[parameters('groupName')]",
"dependsOn": [
"[resourceId('Microsoft.Resources/resourceGroups/', parameters('groupName'))]"
],
"properties": {
"mode": "Incremental",
"template": {
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {},
"variables": {},
"resources": [
{
"comments": "Create a new App Service Plan",
"type": "Microsoft.Web/serverfarms",
"name": "[variables('appServicePlanName')]",
"apiVersion": "2018-02-01",
"location": "[variables('resourcesLocation')]",
"sku": "[parameters('appServicePlanSku')]",
"kind": "linux",
"properties": {
"name": "[variables('appServicePlanName')]",
"perSiteScaling": false,
"reserved": true,
"targetWorkerCount": 0,
"targetWorkerSizeId": 0
}
},
{
"comments": "Create a Web App using the new App Service Plan",
"type": "Microsoft.Web/sites",
"apiVersion": "2015-08-01",
"location": "[variables('resourcesLocation')]",
"kind": "app,linux",
"dependsOn": [
"[concat(variables('resourceGroupId'), '/providers/Microsoft.Web/serverfarms/', variables('appServicePlanName'))]"
],
"name": "[variables('appServiceName')]",
"properties": {
"name": "[variables('appServiceName')]",
"hostNameSslStates": [
{
"name": "[concat(parameters('appServiceName'), '.azurewebsites.net')]",
"sslState": "Disabled",
"hostType": "Standard"
},
{
"name": "[concat(parameters('appServiceName'), '.scm.azurewebsites.net')]",
"sslState": "Disabled",
"hostType": "Repository"
}
],
"serverFarmId": "[variables('appServicePlanName')]",
"siteConfig": {
"appSettings": [
{
"name": "SCM_DO_BUILD_DURING_DEPLOYMENT",
"value": "true"
},
{
"name": "MicrosoftAppId",
"value": "[parameters('appId')]"
},
{
"name": "MicrosoftAppPassword",
"value": "[parameters('appSecret')]"
}
],
"cors": {
"allowedOrigins": [
"https://botservice.hosting.portal.azure.net",
"https://hosting.onecloud.azure-test.net/"
]
},
"webSocketsEnabled": true
}
}
},
{
"type": "Microsoft.Web/sites/config",
"apiVersion": "2016-08-01",
"name": "[concat(parameters('appServiceName'), '/web')]",
"location": "[variables('resourcesLocation')]",
"dependsOn": [
"[concat(variables('resourceGroupId'), '/providers/Microsoft.Web/sites/', parameters('appServiceName'))]"
],
"properties": {
"numberOfWorkers": 1,
"defaultDocuments": [
"Default.htm",
"Default.html",
"Default.asp",
"index.htm",
"index.html",
"iisstart.htm",
"default.aspx",
"index.php",
"hostingstart.html"
],
"netFrameworkVersion": "v4.0",
"phpVersion": "",
"pythonVersion": "",
"nodeVersion": "",
"linuxFxVersion": "PYTHON|3.7",
"requestTracingEnabled": false,
"remoteDebuggingEnabled": false,
"remoteDebuggingVersion": "VS2017",
"httpLoggingEnabled": true,
"logsDirectorySizeLimit": 35,
"detailedErrorLoggingEnabled": false,
"publishingUsername": "[concat('$', parameters('appServiceName'))]",
"scmType": "None",
"use32BitWorkerProcess": true,
"webSocketsEnabled": false,
"alwaysOn": false,
"appCommandLine": "gunicorn --bind 0.0.0.0 --worker-class aiohttp.worker.GunicornWebWorker --timeout 600 app:APP",
"managedPipelineMode": "Integrated",
"virtualApplications": [
{
"virtualPath": "/",
"physicalPath": "site\\wwwroot",
"preloadEnabled": false,
"virtualDirectories": null
}
],
"winAuthAdminState": 0,
"winAuthTenantState": 0,
"customAppPoolIdentityAdminState": false,
"customAppPoolIdentityTenantState": false,
"loadBalancing": "LeastRequests",
"routingRules": [],
"experiments": {
"rampUpRules": []
},
"autoHealEnabled": false,
"vnetName": "",
"minTlsVersion": "1.2",
"ftpsState": "AllAllowed",
"reservedInstanceCount": 0
}
}
],
"outputs": {}
}
}
}
]
}
|
botbuilder-python/generators/app/templates/echo/{{cookiecutter.bot_name}}/deploymentTemplates/deployWithNewResourceGroup/template-BotApp-new-rg.json/0
|
{
"file_path": "botbuilder-python/generators/app/templates/echo/{{cookiecutter.bot_name}}/deploymentTemplates/deployWithNewResourceGroup/template-BotApp-new-rg.json",
"repo_id": "botbuilder-python",
"token_count": 7137
}
| 386 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List
from botbuilder.adapters.slack.slack_event import SlackEvent
from botbuilder.adapters.slack.slack_payload import SlackPayload
class SlackRequestBody:
def __init__(self, **kwargs):
self.challenge = kwargs.get("challenge")
self.token = kwargs.get("token")
self.team_id = kwargs.get("team_id")
self.api_app_id = kwargs.get("api_app_id")
self.type = kwargs.get("type")
self.event_id = kwargs.get("event_id")
self.event_time = kwargs.get("event_time")
self.authed_users: List[str] = kwargs.get("authed_users")
self.trigger_id = kwargs.get("trigger_id")
self.channel_id = kwargs.get("channel_id")
self.user_id = kwargs.get("user_id")
self.text = kwargs.get("text")
self.command = kwargs.get("command")
self.payload: SlackPayload = None
payload = kwargs.get("payload")
if payload is not None:
self.payload = (
payload
if isinstance(payload, SlackPayload)
else SlackPayload(**payload)
)
self.event: SlackEvent = None
event = kwargs.get("event")
if event is not None:
self.event = event if isinstance(event, SlackEvent) else SlackEvent(**event)
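# Usage sketch (illustrative only): construct a request body from a dict shaped
# like a Slack Events API callback. All field values below are made up, and
# SlackEvent is assumed to accept keyword arguments in the same style as this
# class; the nested "event" dict is converted by the constructor above.
if __name__ == "__main__":
    raw_body = {
        "token": "xoxb-not-a-real-token",
        "team_id": "T0001",
        "api_app_id": "A0001",
        "type": "event_callback",
        "event_id": "Ev0001",
        "event_time": 1355517523,
        "event": {"type": "message", "text": "hello"},
    }
    body = SlackRequestBody(**raw_body)
    assert body.team_id == "T0001"
    assert isinstance(body.event, SlackEvent)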
|
botbuilder-python/libraries/botbuilder-adapters-slack/botbuilder/adapters/slack/slack_request_body.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-adapters-slack/botbuilder/adapters/slack/slack_request_body.py",
"repo_id": "botbuilder-python",
"token_count": 624
}
| 387 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List
from botbuilder.core import BotTelemetryClient, NullTelemetryClient
from .luis_recognizer_options import LuisRecognizerOptions
class LuisRecognizerOptionsV3(LuisRecognizerOptions):
def __init__(
self,
include_all_intents: bool = False,
include_instance_data: bool = True,
log: bool = True,
prefer_external_entities: bool = True,
datetime_reference: str = None,
dynamic_lists: List = None,
external_entities: List = None,
slot: str = "production",
version: str = None,
include_api_results: bool = True,
telemetry_client: BotTelemetryClient = NullTelemetryClient(),
log_personal_information: bool = False,
):
super().__init__(
include_api_results, telemetry_client, log_personal_information
)
self.include_all_intents = include_all_intents
self.include_instance_data = include_instance_data
self.log = log
self.prefer_external_entities = prefer_external_entities
self.datetime_reference = datetime_reference
self.dynamic_lists = dynamic_lists
self.external_entities = external_entities
self.slot = slot
self.version: str = version
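# Configuration sketch: build a V3 options object that asks LUIS for all intents
# and instance data while targeting the staging slot. Handing it to a recognizer
# is shown only as a hedged comment; the recognizer constructor arguments are
# assumptions, not verified here.
options = LuisRecognizerOptionsV3(
    include_all_intents=True,
    include_instance_data=True,
    slot="staging",
    log_personal_information=False,
)
# recognizer = LuisRecognizer(luis_application, prediction_options=options)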
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/luis/luis_recognizer_options_v3.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/luis/luis_recognizer_options_v3.py",
"repo_id": "botbuilder-python",
"token_count": 526
}
| 388 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from msrest.serialization import Model
class QnARequestContext(Model):
"""
The context associated with QnA.
Used to mark if the current prompt is relevant with a previous question or not.
"""
_attribute_map = {
"previous_qna_id": {"key": "previousQnAId", "type": "int"},
"previous_user_query": {"key": "previousUserQuery", "type": "string"},
}
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.previous_qna_id = kwargs.get("previous_qna_id", None)
self.previous_user_query = kwargs.get("previous_user_query", None)
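# Usage sketch: mark a follow-up query as related to a previously returned
# answer. The id and query text are illustrative values only.
follow_up_context = QnARequestContext(
    previous_qna_id=42,
    previous_user_query="How do I reset my password?",
)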
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/models/qna_request_context.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/models/qna_request_context.py",
"repo_id": "botbuilder-python",
"token_count": 264
}
| 389 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List
from botbuilder.core import CardFactory
from botbuilder.schema import Activity, ActivityTypes, CardAction, HeroCard
from ..models import QueryResult
class QnACardBuilder:
"""
Message activity card builder for QnAMaker dialogs.
"""
@staticmethod
def get_suggestions_card(
suggestions: List[str], card_title: str, card_no_match: str
) -> Activity:
"""
Get active learning suggestions card.
"""
if not suggestions:
raise TypeError("suggestions list is required")
if not card_title:
raise TypeError("card_title is required")
if not card_no_match:
raise TypeError("card_no_match is required")
# Add all suggestions
button_list = [
CardAction(value=suggestion, type="imBack", title=suggestion)
for suggestion in suggestions
]
# Add No match text
button_list.append(
CardAction(value=card_no_match, type="imBack", title=card_no_match)
)
attachment = CardFactory.hero_card(HeroCard(buttons=button_list))
return Activity(
type=ActivityTypes.message, text=card_title, attachments=[attachment]
)
@staticmethod
def get_qna_prompts_card(result: QueryResult, card_no_match_text: str) -> Activity:
"""
        Get a card that presents the follow-up prompts attached to a QnA result.
"""
if not result:
raise TypeError("result is required")
if not card_no_match_text:
raise TypeError("card_no_match_text is required")
# Add all prompts
button_list = [
CardAction(
value=prompt.display_text,
type="imBack",
title=prompt.display_text,
)
for prompt in result.context.prompts
]
attachment = CardFactory.hero_card(HeroCard(buttons=button_list))
return Activity(
type=ActivityTypes.message, text=result.answer, attachments=[attachment]
)
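# Usage sketch: build an active-learning suggestions card from a few candidate
# questions. The strings are placeholders; sending the resulting activity
# through a TurnContext is left out.
suggestions_activity = QnACardBuilder.get_suggestions_card(
    suggestions=["Reset password", "Unlock account"],
    card_title="Did you mean:",
    card_no_match="None of the above.",
)
# The hero card carries one button per suggestion plus the no-match option.
assert suggestions_activity.attachments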
|
botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/utils/qna_card_builder.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/botbuilder/ai/qna/utils/qna_card_builder.py",
"repo_id": "botbuilder-python",
"token_count": 903
}
| 390 |
{
"entities": {
"$instance": {
"Composite2": [
{
"endIndex": 69,
"modelType": "Composite Entity Extractor",
"recognitionSources": [
"model"
],
"score": 0.97076714,
"startIndex": 0,
"text": "http://foo.com is where you can fly from seattle to dallas via denver",
"type": "Composite2"
}
],
"geographyV2": [
{
"endIndex": 48,
"modelType": "Prebuilt Entity Extractor",
"recognitionSources": [
"model"
],
"startIndex": 41,
"text": "seattle",
"type": "builtin.geographyV2.city"
}
]
},
"Composite2": [
{
"$instance": {
"City": [
{
"endIndex": 69,
"modelType": "Hierarchical Entity Extractor",
"recognitionSources": [
"model"
],
"score": 0.984581649,
"startIndex": 63,
"text": "denver",
"type": "City"
}
],
"From": [
{
"endIndex": 48,
"modelType": "Hierarchical Entity Extractor",
"recognitionSources": [
"model"
],
"score": 0.999511,
"startIndex": 41,
"text": "seattle",
"type": "City::From"
}
],
"To": [
{
"endIndex": 58,
"modelType": "Hierarchical Entity Extractor",
"recognitionSources": [
"model"
],
"score": 0.9984612,
"startIndex": 52,
"text": "dallas",
"type": "City::To"
}
],
"url": [
{
"endIndex": 14,
"modelType": "Prebuilt Entity Extractor",
"recognitionSources": [
"model"
],
"startIndex": 0,
"text": "http://foo.com",
"type": "builtin.url"
}
]
},
"City": [
"denver"
],
"From": [
"seattle"
],
"To": [
"dallas"
],
"url": [
"http://foo.com"
]
}
],
"geographyV2": [
{
"location": "seattle",
"type": "city"
}
]
},
"intents": {
"Cancel": {
"score": 0.000227437369
},
"Delivery": {
"score": 0.001310123
},
"EntityTests": {
"score": 0.94500196
},
"Greeting": {
"score": 0.000152356763
},
"Help": {
"score": 0.000547201431
},
"None": {
"score": 0.004187195
},
"Roles": {
"score": 0.0300086979
},
"search": {
"score": 0.0108942846
},
"SpecifyName": {
"score": 0.00168467627
},
"Travel": {
"score": 0.0154484725
},
"Weather_GetForecast": {
"score": 0.0237181056
}
},
"sentiment": {
"label": "neutral",
"score": 0.5
},
"text": "http://foo.com is where you can fly from seattle to dallas via denver",
"v3": {
"options": {
"includeAllIntents": true,
"includeAPIResults": true,
"includeInstanceData": true,
"log": true,
"preferExternalEntities": true,
"slot": "production"
},
"response": {
"prediction": {
"entities": {
"$instance": {
"Composite2": [
{
"length": 69,
"modelType": "Composite Entity Extractor",
"modelTypeId": 4,
"recognitionSources": [
"model"
],
"score": 0.97076714,
"startIndex": 0,
"text": "http://foo.com is where you can fly from seattle to dallas via denver",
"type": "Composite2"
}
],
"geographyV2": [
{
"length": 7,
"modelType": "Prebuilt Entity Extractor",
"modelTypeId": 2,
"recognitionSources": [
"model"
],
"startIndex": 41,
"text": "seattle",
"type": "builtin.geographyV2.city"
}
]
},
"Composite2": [
{
"$instance": {
"City": [
{
"length": 6,
"modelType": "Hierarchical Entity Extractor",
"modelTypeId": 3,
"recognitionSources": [
"model"
],
"score": 0.984581649,
"startIndex": 63,
"text": "denver",
"type": "City"
}
],
"City::From": [
{
"length": 7,
"modelType": "Hierarchical Entity Extractor",
"modelTypeId": 3,
"recognitionSources": [
"model"
],
"score": 0.999511,
"startIndex": 41,
"text": "seattle",
"type": "City::From"
}
],
"City::To": [
{
"length": 6,
"modelType": "Hierarchical Entity Extractor",
"modelTypeId": 3,
"recognitionSources": [
"model"
],
"score": 0.9984612,
"startIndex": 52,
"text": "dallas",
"type": "City::To"
}
],
"url": [
{
"length": 14,
"modelType": "Prebuilt Entity Extractor",
"modelTypeId": 2,
"recognitionSources": [
"model"
],
"startIndex": 0,
"text": "http://foo.com",
"type": "builtin.url"
}
]
},
"City": [
"denver"
],
"City::From": [
"seattle"
],
"City::To": [
"dallas"
],
"url": [
"http://foo.com"
]
}
],
"geographyV2": [
{
"type": "city",
"value": "seattle"
}
]
},
"intents": {
"Cancel": {
"score": 0.000227437369
},
"Delivery": {
"score": 0.001310123
},
"EntityTests": {
"score": 0.94500196
},
"Greeting": {
"score": 0.000152356763
},
"Help": {
"score": 0.000547201431
},
"None": {
"score": 0.004187195
},
"Roles": {
"score": 0.0300086979
},
"search": {
"score": 0.0108942846
},
"SpecifyName": {
"score": 0.00168467627
},
"Travel": {
"score": 0.0154484725
},
"Weather.GetForecast": {
"score": 0.0237181056
}
},
"normalizedQuery": "http://foo.com is where you can fly from seattle to dallas via denver",
"sentiment": {
"label": "neutral",
"score": 0.5
},
"topIntent": "EntityTests"
},
"query": "http://foo.com is where you can fly from seattle to dallas via denver"
}
}
}
|
botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/Composite2_v3.json/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/tests/luis/test_data/Composite2_v3.json",
"repo_id": "botbuilder-python",
"token_count": 5786
}
| 391 |
{
"answers": [
{
"questions": [],
"answer": "No good match found in KB.",
"score": 0,
"id": -1,
"source": null,
"metadata": []
}
],
"debugInfo": null
}
|
botbuilder-python/libraries/botbuilder-ai/tests/qna/test_data/NoAnswerFoundInKb.json/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-ai/tests/qna/test_data/NoAnswerFoundInKb.json",
"repo_id": "botbuilder-python",
"token_count": 154
}
| 392 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .application_insights_telemetry_client import (
ApplicationInsightsTelemetryClient,
bot_telemetry_processor,
)
from .bot_telemetry_processor import BotTelemetryProcessor
__all__ = [
"ApplicationInsightsTelemetryClient",
"BotTelemetryProcessor",
"bot_telemetry_processor",
]
|
botbuilder-python/libraries/botbuilder-applicationinsights/botbuilder/applicationinsights/__init__.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-applicationinsights/botbuilder/applicationinsights/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 148
}
| 393 |
[bdist_wheel]
universal=0
|
botbuilder-python/libraries/botbuilder-applicationinsights/setup.cfg/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-applicationinsights/setup.cfg",
"repo_id": "botbuilder-python",
"token_count": 10
}
| 394 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from unittest.mock import Mock
import azure.cosmos.errors as cosmos_errors
from azure.cosmos.cosmos_client import CosmosClient
import pytest
from botbuilder.core import StoreItem
from botbuilder.azure import CosmosDbStorage, CosmosDbConfig
from botbuilder.testing import StorageBaseTests
# Configuration pointing at a local Cosmos DB emulator instance.
COSMOS_DB_CONFIG = CosmosDbConfig(
endpoint="https://localhost:8081",
masterkey="C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==",
database="test-db",
container="bot-storage",
)
EMULATOR_RUNNING = False
def get_storage():
return CosmosDbStorage(COSMOS_DB_CONFIG)
async def reset():
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
try:
storage.client.DeleteDatabase(database_link="dbs/" + COSMOS_DB_CONFIG.database)
except cosmos_errors.HTTPFailure:
pass
def get_mock_client(identifier: str = "1"):
# pylint: disable=attribute-defined-outside-init, invalid-name
mock = MockClient()
mock.QueryDatabases = Mock(return_value=[])
mock.QueryContainers = Mock(return_value=[])
mock.CreateDatabase = Mock(return_value={"id": identifier})
mock.CreateContainer = Mock(return_value={"id": identifier})
return mock
class MockClient(CosmosClient):
def __init__(self): # pylint: disable=super-init-not-called
pass
class SimpleStoreItem(StoreItem):
def __init__(self, counter=1, e_tag="*"):
super(SimpleStoreItem, self).__init__()
self.counter = counter
self.e_tag = e_tag
class TestCosmosDbStorageConstructor:
@pytest.mark.asyncio
async def test_cosmos_storage_init_should_error_without_cosmos_db_config(self):
        with pytest.raises(Exception):
            CosmosDbStorage(CosmosDbConfig())
@pytest.mark.asyncio
async def test_creation_request_options_are_being_called(self):
# pylint: disable=protected-access
test_config = CosmosDbConfig(
endpoint="https://localhost:8081",
masterkey="C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==",
database="test-db",
container="bot-storage",
database_creation_options={"OfferThroughput": 1000},
container_creation_options={"OfferThroughput": 500},
)
test_id = "1"
client = get_mock_client(identifier=test_id)
storage = CosmosDbStorage(test_config, client)
storage.database = test_id
assert storage._get_or_create_database(doc_client=client, id=test_id), test_id
client.CreateDatabase.assert_called_with(
{"id": test_id}, test_config.database_creation_options
)
assert storage._get_or_create_container(
doc_client=client, container=test_id
), test_id
client.CreateContainer.assert_called_with(
"dbs/" + test_id, {"id": test_id}, test_config.container_creation_options
)
class TestCosmosDbStorageBaseStorageTests:
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_return_empty_object_when_reading_unknown_key(self):
await reset()
test_ran = await StorageBaseTests.return_empty_object_when_reading_unknown_key(
get_storage()
)
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_handle_null_keys_when_reading(self):
await reset()
test_ran = await StorageBaseTests.handle_null_keys_when_reading(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_handle_null_keys_when_writing(self):
await reset()
test_ran = await StorageBaseTests.handle_null_keys_when_writing(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_does_not_raise_when_writing_no_items(self):
await reset()
test_ran = await StorageBaseTests.does_not_raise_when_writing_no_items(
get_storage()
)
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_create_object(self):
await reset()
test_ran = await StorageBaseTests.create_object(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_handle_crazy_keys(self):
await reset()
test_ran = await StorageBaseTests.handle_crazy_keys(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_update_object(self):
await reset()
test_ran = await StorageBaseTests.update_object(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_delete_object(self):
await reset()
test_ran = await StorageBaseTests.delete_object(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_perform_batch_operations(self):
await reset()
test_ran = await StorageBaseTests.perform_batch_operations(get_storage())
assert test_ran
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_proceeds_through_waterfall(self):
await reset()
test_ran = await StorageBaseTests.proceeds_through_waterfall(get_storage())
assert test_ran
class TestCosmosDbStorage:
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_init_should_work_with_just_endpoint_and_key(self):
storage = CosmosDbStorage(
CosmosDbConfig(
endpoint=COSMOS_DB_CONFIG.endpoint, masterkey=COSMOS_DB_CONFIG.masterkey
)
)
await storage.write({"user": SimpleStoreItem()})
data = await storage.read(["user"])
assert "user" in data
assert data["user"].counter == 1
assert len(data.keys()) == 1
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_read_update_should_return_new_etag(self):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write({"test": SimpleStoreItem(counter=1)})
data_result = await storage.read(["test"])
data_result["test"].counter = 2
await storage.write(data_result)
data_updated = await storage.read(["test"])
assert data_updated["test"].counter == 2
assert data_updated["test"].e_tag != data_result["test"].e_tag
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_read_with_invalid_key_should_return_empty_dict(self):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
data = await storage.read(["test"])
assert isinstance(data, dict)
assert not data.keys()
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_write_should_overwrite_when_new_e_tag_is_an_asterisk(
self,
):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write({"user": SimpleStoreItem()})
await storage.write({"user": SimpleStoreItem(counter=10, e_tag="*")})
data = await storage.read(["user"])
assert data["user"].counter == 10
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_delete_should_delete_multiple_values_when_given_multiple_valid_keys(
self,
):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write({"test": SimpleStoreItem(), "test2": SimpleStoreItem(2)})
await storage.delete(["test", "test2"])
data = await storage.read(["test", "test2"])
assert not data.keys()
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_delete_should_delete_values_when_given_multiple_valid_keys_and_ignore_other_data(
self,
):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write(
{
"test": SimpleStoreItem(),
"test2": SimpleStoreItem(counter=2),
"test3": SimpleStoreItem(counter=3),
}
)
await storage.delete(["test", "test2"])
data = await storage.read(["test", "test2", "test3"])
assert len(data.keys()) == 1
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_delete_invalid_key_should_do_nothing_and_not_affect_cached_data(
self,
):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write({"test": SimpleStoreItem()})
await storage.delete(["foo"])
data = await storage.read(["test"])
assert len(data.keys()) == 1
data = await storage.read(["foo"])
assert not data.keys()
@pytest.mark.skipif(not EMULATOR_RUNNING, reason="Needs the emulator to run.")
@pytest.mark.asyncio
async def test_cosmos_storage_delete_invalid_keys_should_do_nothing_and_not_affect_cached_data(
self,
):
await reset()
storage = CosmosDbStorage(COSMOS_DB_CONFIG)
await storage.write({"test": SimpleStoreItem()})
await storage.delete(["foo", "bar"])
data = await storage.read(["test"])
assert len(data.keys()) == 1
|
botbuilder-python/libraries/botbuilder-azure/tests/test_cosmos_storage.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-azure/tests/test_cosmos_storage.py",
"repo_id": "botbuilder-python",
"token_count": 4398
}
| 395 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List
from .bot_state import BotState
from .turn_context import TurnContext
class BotStateSet:
def __init__(self, bot_states: List[BotState]):
self.bot_states = list(bot_states)
def add(self, bot_state: BotState) -> "BotStateSet":
if bot_state is None:
raise TypeError("Expected BotState")
self.bot_states.append(bot_state)
return self
async def load_all(self, turn_context: TurnContext, force: bool = False):
for bot_state in self.bot_states:
await bot_state.load(turn_context, force)
async def save_all_changes(self, turn_context: TurnContext, force: bool = False):
for bot_state in self.bot_states:
await bot_state.save_changes(turn_context, force)
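# Usage sketch of application code (assumptions flagged): group conversation and
# user state so they are loaded and saved together once per turn. MemoryStorage,
# ConversationState and UserState are real botbuilder.core classes; wiring this
# into an adapter's turn handler is omitted.
from botbuilder.core import ConversationState, MemoryStorage, UserState

storage = MemoryStorage()
state_set = BotStateSet([ConversationState(storage), UserState(storage)])

async def on_turn(turn_context: TurnContext):
    await state_set.load_all(turn_context)
    # ... dialog / handler logic would run here ...
    await state_set.save_all_changes(turn_context)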
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/bot_state_set.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/bot_state_set.py",
"repo_id": "botbuilder-python",
"token_count": 323
}
| 396 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from abc import ABC, abstractmethod
from botbuilder.schema import Activity
class QueueStorage(ABC):
"""
A base class for enqueueing an Activity for later processing.
"""
@abstractmethod
async def queue_activity(
self,
activity: Activity,
visibility_timeout: int = None,
time_to_live: int = None,
) -> str:
"""
        Enqueues an Activity for later processing. The visibility timeout specifies how long the message
        remains invisible to Dequeue and Peek operations before it can be processed.
:param activity: The activity to be queued for later processing.
:type activity: :class:`botbuilder.schema.Activity`
:param visibility_timeout: Visibility timeout in seconds. Optional with a default value of 0.
Cannot be larger than 7 days.
:type visibility_timeout: int
:param time_to_live: Specifies the time-to-live interval for the message in seconds.
:type time_to_live: int
:returns: String representing the read receipt.
"""
raise NotImplementedError()
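# Minimal sketch of a concrete implementation (illustration only): an in-memory
# queue that ignores the timeout parameters and returns a fake receipt. A real
# implementation (e.g. one backed by an Azure Storage queue) would honor
# visibility_timeout and time_to_live.
import json
import uuid
from typing import List, Tuple


class InMemoryQueueStorage(QueueStorage):
    def __init__(self):
        self._messages: List[Tuple[str, str]] = []

    async def queue_activity(
        self,
        activity: Activity,
        visibility_timeout: int = None,
        time_to_live: int = None,
    ) -> str:
        receipt = str(uuid.uuid4())
        # Store the serialized activity alongside its receipt.
        self._messages.append((receipt, json.dumps(activity.serialize())))
        return receipt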
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/queue_storage.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/queue_storage.py",
"repo_id": "botbuilder-python",
"token_count": 409
}
| 397 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from logging import Logger
from botbuilder.core import Bot, BotAdapter, ChannelServiceHandler
from botbuilder.schema import (
Activity,
ResourceResponse,
)
from botframework.connector.auth import (
AuthenticationConfiguration,
AuthenticationConstants,
ChannelProvider,
ClaimsIdentity,
CredentialProvider,
GovernmentConstants,
)
from .conversation_id_factory import ConversationIdFactoryBase
class SkillHandler(ChannelServiceHandler):
SKILL_CONVERSATION_REFERENCE_KEY = (
"botbuilder.core.skills.SkillConversationReference"
)
def __init__(
self,
adapter: BotAdapter,
bot: Bot,
conversation_id_factory: ConversationIdFactoryBase,
credential_provider: CredentialProvider,
auth_configuration: AuthenticationConfiguration,
channel_provider: ChannelProvider = None,
logger: Logger = None,
):
# pylint: disable=import-outside-toplevel
super().__init__(credential_provider, auth_configuration, channel_provider)
if not adapter:
raise TypeError("adapter can't be None")
if not bot:
raise TypeError("bot can't be None")
if not conversation_id_factory:
raise TypeError("conversation_id_factory can't be None")
self._logger = logger
def aux_func():
nonlocal self
return (
GovernmentConstants.TO_CHANNEL_FROM_BOT_OAUTH_SCOPE
if self._channel_provider and self._channel_provider.is_government()
else AuthenticationConstants.TO_CHANNEL_FROM_BOT_OAUTH_SCOPE
)
from ._skill_handler_impl import _SkillHandlerImpl
self._inner = _SkillHandlerImpl(
self.SKILL_CONVERSATION_REFERENCE_KEY,
adapter,
bot,
conversation_id_factory,
aux_func,
)
async def on_send_to_conversation(
self,
claims_identity: ClaimsIdentity,
conversation_id: str,
activity: Activity,
) -> ResourceResponse:
"""
send_to_conversation() API for Skill
This method allows you to send an activity to the end of a conversation.
This is slightly different from ReplyToActivity().
* SendToConversation(conversation_id) - will append the activity to the end
of the conversation according to the timestamp or semantics of the channel.
* ReplyToActivity(conversation_id,ActivityId) - adds the activity as a reply
to another activity, if the channel supports it. If the channel does not
support nested replies, ReplyToActivity falls back to SendToConversation.
Use ReplyToActivity when replying to a specific activity in the
conversation.
Use SendToConversation in all other cases.
:param claims_identity: Claims identity for the bot.
:type claims_identity: :class:`botframework.connector.auth.ClaimsIdentity`
        :param conversation_id: The conversation ID.
:type conversation_id: str
:param activity: Activity to send.
:type activity: Activity
:return:
"""
return await self._inner.on_send_to_conversation(
claims_identity,
conversation_id,
activity,
)
async def on_reply_to_activity(
self,
claims_identity: ClaimsIdentity,
conversation_id: str,
activity_id: str,
activity: Activity,
) -> ResourceResponse:
"""
reply_to_activity() API for Skill.
This method allows you to reply to an activity.
This is slightly different from SendToConversation().
* SendToConversation(conversation_id) - will append the activity to the end
of the conversation according to the timestamp or semantics of the channel.
* ReplyToActivity(conversation_id,ActivityId) - adds the activity as a reply
to another activity, if the channel supports it. If the channel does not
support nested replies, ReplyToActivity falls back to SendToConversation.
Use ReplyToActivity when replying to a specific activity in the
conversation.
Use SendToConversation in all other cases.
:param claims_identity: Claims identity for the bot.
:type claims_identity: :class:`botframework.connector.auth.ClaimsIdentity`
        :param conversation_id: The conversation ID.
:type conversation_id: str
:param activity_id: Activity ID to send.
:type activity_id: str
:param activity: Activity to send.
:type activity: Activity
:return:
"""
return await self._inner.on_reply_to_activity(
claims_identity,
conversation_id,
activity_id,
activity,
)
async def on_delete_activity(
self, claims_identity: ClaimsIdentity, conversation_id: str, activity_id: str
):
await self._inner.on_delete_activity(
claims_identity, conversation_id, activity_id
)
async def on_update_activity(
self,
claims_identity: ClaimsIdentity,
conversation_id: str,
activity_id: str,
activity: Activity,
) -> ResourceResponse:
return await self._inner.on_update_activity(
claims_identity, conversation_id, activity_id, activity
)
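# Wiring sketch (assumptions flagged): how a skill host might construct this
# handler. The adapter, bot and conversation_id_factory arguments are
# placeholders for concrete implementations the hosting application provides.
# AuthenticationConfiguration is already imported above; SimpleCredentialProvider
# is a real class in botframework.connector.auth.
from botframework.connector.auth import SimpleCredentialProvider


def build_skill_handler(adapter, bot, conversation_id_factory, app_id, app_password):
    credential_provider = SimpleCredentialProvider(app_id, app_password)
    return SkillHandler(
        adapter=adapter,
        bot=bot,
        conversation_id_factory=conversation_id_factory,
        credential_provider=credential_provider,
        auth_configuration=AuthenticationConfiguration(),
    )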
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/skills/skill_handler.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/skills/skill_handler.py",
"repo_id": "botbuilder-python",
"token_count": 2191
}
| 398 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License
class TelemetryConstants:
"""Telemetry logger property names."""
ATTACHMENTS_PROPERTY: str = "attachments"
CHANNEL_ID_PROPERTY: str = "channelId"
CONVERSATION_ID_PROPERTY: str = "conversationId"
CONVERSATION_NAME_PROPERTY: str = "conversationName"
DIALOG_ID_PROPERTY: str = "dialogId"
FROM_ID_PROPERTY: str = "fromId"
FROM_NAME_PROPERTY: str = "fromName"
LOCALE_PROPERTY: str = "locale"
RECIPIENT_ID_PROPERTY: str = "recipientId"
RECIPIENT_NAME_PROPERTY: str = "recipientName"
REPLY_ACTIVITY_ID_PROPERTY: str = "replyActivityId"
TEXT_PROPERTY: str = "text"
SPEAK_PROPERTY: str = "speak"
USER_ID_PROPERTY: str = "userId"
|
botbuilder-python/libraries/botbuilder-core/botbuilder/core/telemetry_constants.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/botbuilder/core/telemetry_constants.py",
"repo_id": "botbuilder-python",
"token_count": 313
}
| 399 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# pylint: disable=too-many-lines
from typing import List
import aiounittest
from botbuilder.core import BotAdapter, TurnContext
from botbuilder.core.teams import TeamsActivityHandler
from botbuilder.schema import (
Activity,
ActivityTypes,
ChannelAccount,
ConversationAccount,
ConversationReference,
ResourceResponse,
)
from botbuilder.schema.teams import (
AppBasedLinkQuery,
ChannelInfo,
FileConsentCardResponse,
MeetingStartEventDetails,
MeetingEndEventDetails,
MessageActionsPayload,
MessagingExtensionAction,
MessagingExtensionQuery,
O365ConnectorCardActionQuery,
TaskModuleRequest,
TaskModuleRequestContext,
TeamInfo,
TeamsChannelAccount,
TabRequest,
TabSubmit,
TabContext,
)
from botframework.connector import Channels
from simple_adapter import SimpleAdapter
class TestingTeamsActivityHandler(TeamsActivityHandler):
__test__ = False
def __init__(self):
self.record: List[str] = []
async def on_conversation_update_activity(self, turn_context: TurnContext):
self.record.append("on_conversation_update_activity")
return await super().on_conversation_update_activity(turn_context)
async def on_teams_members_added( # pylint: disable=unused-argument
self,
teams_members_added: [TeamsChannelAccount],
team_info: TeamInfo,
turn_context: TurnContext,
):
self.record.append("on_teams_members_added")
return await super().on_teams_members_added(
teams_members_added, team_info, turn_context
)
async def on_teams_members_removed(
self,
teams_members_removed: [TeamsChannelAccount],
team_info: TeamInfo,
turn_context: TurnContext,
):
self.record.append("on_teams_members_removed")
return await super().on_teams_members_removed(
teams_members_removed, team_info, turn_context
)
async def on_message_activity(self, turn_context: TurnContext):
self.record.append("on_message_activity")
return await super().on_message_activity(turn_context)
async def on_token_response_event(self, turn_context: TurnContext):
self.record.append("on_token_response_event")
return await super().on_token_response_event(turn_context)
async def on_event(self, turn_context: TurnContext):
self.record.append("on_event")
return await super().on_event(turn_context)
async def on_end_of_conversation_activity(self, turn_context: TurnContext):
self.record.append("on_end_of_conversation_activity")
return await super().on_end_of_conversation_activity(turn_context)
async def on_typing_activity(self, turn_context: TurnContext):
self.record.append("on_typing_activity")
return await super().on_typing_activity(turn_context)
async def on_unrecognized_activity_type(self, turn_context: TurnContext):
self.record.append("on_unrecognized_activity_type")
return await super().on_unrecognized_activity_type(turn_context)
async def on_teams_channel_created(
self, channel_info: ChannelInfo, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_channel_created")
return await super().on_teams_channel_created(
channel_info, team_info, turn_context
)
async def on_teams_channel_renamed(
self, channel_info: ChannelInfo, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_channel_renamed")
return await super().on_teams_channel_renamed(
channel_info, team_info, turn_context
)
async def on_teams_channel_restored(
self, channel_info: ChannelInfo, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_channel_restored")
return await super().on_teams_channel_restored(
channel_info, team_info, turn_context
)
async def on_teams_channel_deleted(
self, channel_info: ChannelInfo, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_channel_deleted")
        return await super().on_teams_channel_deleted(
channel_info, team_info, turn_context
)
async def on_teams_team_archived(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_archived")
return await super().on_teams_team_archived(team_info, turn_context)
async def on_teams_team_deleted(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_deleted")
return await super().on_teams_team_deleted(team_info, turn_context)
async def on_teams_team_hard_deleted(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_hard_deleted")
return await super().on_teams_team_hard_deleted(team_info, turn_context)
async def on_teams_team_renamed_activity(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_renamed_activity")
return await super().on_teams_team_renamed_activity(team_info, turn_context)
async def on_teams_team_restored(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_restored")
return await super().on_teams_team_restored(team_info, turn_context)
async def on_teams_team_unarchived(
self, team_info: TeamInfo, turn_context: TurnContext
):
self.record.append("on_teams_team_unarchived")
return await super().on_teams_team_unarchived(team_info, turn_context)
async def on_invoke_activity(self, turn_context: TurnContext):
self.record.append("on_invoke_activity")
return await super().on_invoke_activity(turn_context)
async def on_teams_signin_verify_state(self, turn_context: TurnContext):
self.record.append("on_teams_signin_verify_state")
return await super().on_teams_signin_verify_state(turn_context)
async def on_teams_file_consent(
self,
turn_context: TurnContext,
file_consent_card_response: FileConsentCardResponse,
):
self.record.append("on_teams_file_consent")
return await super().on_teams_file_consent(
turn_context, file_consent_card_response
)
async def on_teams_file_consent_accept(
self,
turn_context: TurnContext,
file_consent_card_response: FileConsentCardResponse,
):
self.record.append("on_teams_file_consent_accept")
return await super().on_teams_file_consent_accept(
turn_context, file_consent_card_response
)
async def on_teams_file_consent_decline(
self,
turn_context: TurnContext,
file_consent_card_response: FileConsentCardResponse,
):
self.record.append("on_teams_file_consent_decline")
return await super().on_teams_file_consent_decline(
turn_context, file_consent_card_response
)
async def on_teams_o365_connector_card_action(
self, turn_context: TurnContext, query: O365ConnectorCardActionQuery
):
self.record.append("on_teams_o365_connector_card_action")
return await super().on_teams_o365_connector_card_action(turn_context, query)
async def on_teams_app_based_link_query(
self, turn_context: TurnContext, query: AppBasedLinkQuery
):
self.record.append("on_teams_app_based_link_query")
return await super().on_teams_app_based_link_query(turn_context, query)
async def on_teams_messaging_extension_query(
self, turn_context: TurnContext, query: MessagingExtensionQuery
):
self.record.append("on_teams_messaging_extension_query")
return await super().on_teams_messaging_extension_query(turn_context, query)
async def on_teams_messaging_extension_submit_action_dispatch(
self, turn_context: TurnContext, action: MessagingExtensionAction
):
self.record.append("on_teams_messaging_extension_submit_action_dispatch")
return await super().on_teams_messaging_extension_submit_action_dispatch(
turn_context, action
)
async def on_teams_messaging_extension_submit_action(
self, turn_context: TurnContext, action: MessagingExtensionAction
):
self.record.append("on_teams_messaging_extension_submit_action")
return await super().on_teams_messaging_extension_submit_action(
turn_context, action
)
async def on_teams_messaging_extension_bot_message_preview_edit(
self, turn_context: TurnContext, action: MessagingExtensionAction
):
self.record.append("on_teams_messaging_extension_bot_message_preview_edit")
return await super().on_teams_messaging_extension_bot_message_preview_edit(
turn_context, action
)
async def on_teams_messaging_extension_bot_message_preview_send(
self, turn_context: TurnContext, action: MessagingExtensionAction
):
self.record.append("on_teams_messaging_extension_bot_message_preview_send")
return await super().on_teams_messaging_extension_bot_message_preview_send(
turn_context, action
)
async def on_teams_messaging_extension_fetch_task(
self, turn_context: TurnContext, action: MessagingExtensionAction
):
self.record.append("on_teams_messaging_extension_fetch_task")
return await super().on_teams_messaging_extension_fetch_task(
turn_context, action
)
async def on_teams_messaging_extension_configuration_query_settings_url(
self, turn_context: TurnContext, query: MessagingExtensionQuery
):
self.record.append(
"on_teams_messaging_extension_configuration_query_settings_url"
)
return (
await super().on_teams_messaging_extension_configuration_query_settings_url(
turn_context, query
)
)
async def on_teams_messaging_extension_configuration_setting(
self, turn_context: TurnContext, settings
):
self.record.append("on_teams_messaging_extension_configuration_setting")
return await super().on_teams_messaging_extension_configuration_setting(
turn_context, settings
)
async def on_teams_messaging_extension_card_button_clicked(
self, turn_context: TurnContext, card_data
):
self.record.append("on_teams_messaging_extension_card_button_clicked")
return await super().on_teams_messaging_extension_card_button_clicked(
turn_context, card_data
)
async def on_teams_task_module_fetch(
self, turn_context: TurnContext, task_module_request
):
self.record.append("on_teams_task_module_fetch")
return await super().on_teams_task_module_fetch(
turn_context, task_module_request
)
async def on_teams_task_module_submit( # pylint: disable=unused-argument
self, turn_context: TurnContext, task_module_request: TaskModuleRequest
):
self.record.append("on_teams_task_module_submit")
return await super().on_teams_task_module_submit(
turn_context, task_module_request
)
async def on_teams_tab_fetch(
self, turn_context: TurnContext, tab_request: TabRequest
):
self.record.append("on_teams_tab_fetch")
return await super().on_teams_tab_fetch(turn_context, tab_request)
async def on_teams_tab_submit(
self, turn_context: TurnContext, tab_submit: TabSubmit
):
self.record.append("on_teams_tab_submit")
return await super().on_teams_tab_submit(turn_context, tab_submit)
async def on_event_activity(self, turn_context: TurnContext):
self.record.append("on_event_activity")
return await super().on_event_activity(turn_context)
async def on_teams_meeting_start_event(
self, meeting: MeetingStartEventDetails, turn_context: TurnContext
):
self.record.append("on_teams_meeting_start_event")
return await super().on_teams_meeting_start_event(
turn_context.activity.value, turn_context
)
async def on_teams_meeting_end_event(
self, meeting: MeetingEndEventDetails, turn_context: TurnContext
):
self.record.append("on_teams_meeting_end_event")
return await super().on_teams_meeting_end_event(
turn_context.activity.value, turn_context
)
class NotImplementedAdapter(BotAdapter):
async def delete_activity(
self, context: TurnContext, reference: ConversationReference
):
raise NotImplementedError()
async def send_activities(
self, context: TurnContext, activities: List[Activity]
) -> List[ResourceResponse]:
raise NotImplementedError()
async def update_activity(self, context: TurnContext, activity: Activity):
raise NotImplementedError()
class TestTeamsActivityHandler(aiounittest.AsyncTestCase):
async def test_on_teams_channel_created_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "channelCreated",
"channel": {"id": "asdfqwerty", "name": "new_channel"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_channel_created"
async def test_on_teams_channel_renamed_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "channelRenamed",
"channel": {"id": "asdfqwerty", "name": "new_channel"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_channel_renamed"
async def test_on_teams_channel_restored_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "channelRestored",
"channel": {"id": "asdfqwerty", "name": "channel_restored"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_channel_restored"
async def test_on_teams_channel_deleted_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "channelDeleted",
"channel": {"id": "asdfqwerty", "name": "new_channel"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_channel_deleted"
async def test_on_teams_team_archived(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamArchived",
"team": {"id": "team_id_1", "name": "archived_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_archived"
async def test_on_teams_team_deleted(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamDeleted",
"team": {"id": "team_id_1", "name": "deleted_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_deleted"
async def test_on_teams_team_hard_deleted(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamHardDeleted",
"team": {"id": "team_id_1", "name": "hard_deleted_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_hard_deleted"
async def test_on_teams_team_renamed_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamRenamed",
"team": {"id": "team_id_1", "name": "new_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_renamed_activity"
async def test_on_teams_team_restored(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamRestored",
"team": {"id": "team_id_1", "name": "restored_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_restored"
async def test_on_teams_team_unarchived(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamUnarchived",
"team": {"id": "team_id_1", "name": "unarchived_team_name"},
},
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(NotImplementedAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_team_unarchived"
async def test_on_teams_members_added_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamMemberAdded",
"team": {"id": "team_id_1", "name": "new_team_name"},
},
members_added=[
ChannelAccount(
id="123",
name="test_user",
aad_object_id="asdfqwerty",
role="tester",
)
],
channel_id=Channels.ms_teams,
conversation=ConversationAccount(id="456"),
)
turn_context = TurnContext(SimpleAdapter(), activity)
mock_connector_client = await SimpleAdapter.create_connector_client(
self, turn_context.activity.service_url
)
turn_context.turn_state[
BotAdapter.BOT_CONNECTOR_CLIENT_KEY
] = mock_connector_client
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_members_added"
async def test_bot_on_teams_members_added_activity(self):
# arrange
activity = Activity(
recipient=ChannelAccount(id="botid"),
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamMemberAdded",
"team": {"id": "team_id_1", "name": "new_team_name"},
},
members_added=[
ChannelAccount(
id="botid",
name="test_user",
aad_object_id="asdfqwerty",
role="tester",
)
],
channel_id=Channels.ms_teams,
conversation=ConversationAccount(id="456"),
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_members_added"
async def test_on_teams_members_removed_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.conversation_update,
channel_data={
"eventType": "teamMemberRemoved",
"team": {"id": "team_id_1", "name": "new_team_name"},
},
members_removed=[
ChannelAccount(
id="123",
name="test_user",
aad_object_id="asdfqwerty",
role="tester",
)
],
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_conversation_update_activity"
assert bot.record[1] == "on_teams_members_removed"
async def test_on_signin_verify_state(self):
# arrange
activity = Activity(type=ActivityTypes.invoke, name="signin/verifyState")
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_signin_verify_state"
async def test_on_file_consent_accept_activity(self):
# arrange
activity = Activity(
type=ActivityTypes.invoke,
name="fileConsent/invoke",
value={"action": "accept"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_file_consent"
assert bot.record[2] == "on_teams_file_consent_accept"
async def test_on_file_consent_decline_activity(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="fileConsent/invoke",
value={"action": "decline"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_file_consent"
assert bot.record[2] == "on_teams_file_consent_decline"
async def test_on_file_consent_bad_action_activity(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="fileConsent/invoke",
value={"action": "bad_action"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_file_consent"
async def test_on_teams_o365_connector_card_action(self):
# arrange
activity = Activity(
type=ActivityTypes.invoke,
name="actionableMessage/executeAction",
value={"body": "body_here", "actionId": "action_id_here"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_o365_connector_card_action"
async def test_on_app_based_link_query(self):
# arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/query",
value={"url": "http://www.test.com"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_query"
async def test_on_teams_messaging_extension_bot_message_preview_edit_activity(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/submitAction",
value={
"data": {"key": "value"},
"context": {"theme": "dark"},
"commandId": "test_command",
"commandContext": "command_context_test",
"botMessagePreviewAction": "edit",
"botActivityPreview": [{"id": "activity123"}],
"messagePayload": {"id": "payloadid"},
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_submit_action_dispatch"
assert bot.record[2] == "on_teams_messaging_extension_bot_message_preview_edit"
async def test_on_teams_messaging_extension_bot_message_send_activity(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/submitAction",
value={
"data": {"key": "value"},
"context": {"theme": "dark"},
"commandId": "test_command",
"commandContext": "command_context_test",
"botMessagePreviewAction": "send",
"botActivityPreview": [{"id": "123"}],
"messagePayload": {"id": "abc"},
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_submit_action_dispatch"
assert bot.record[2] == "on_teams_messaging_extension_bot_message_preview_send"
async def test_on_teams_messaging_extension_bot_message_send_activity_with_none(
self,
):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/submitAction",
value={
"data": {"key": "value"},
"context": {"theme": "dark"},
"commandId": "test_command",
"commandContext": "command_context_test",
"botMessagePreviewAction": None,
"botActivityPreview": [{"id": "test123"}],
"messagePayload": {"id": "payloadid123"},
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_submit_action_dispatch"
assert bot.record[2] == "on_teams_messaging_extension_submit_action"
async def test_on_teams_messaging_extension_bot_message_send_activity_with_empty_string(
self,
):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/submitAction",
value={
"data": {"key": "value"},
"context": {"theme": "dark"},
"commandId": "test_command",
"commandContext": "command_context_test",
"botMessagePreviewAction": "",
"botActivityPreview": [Activity().serialize()],
"messagePayload": MessageActionsPayload().serialize(),
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 3
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_submit_action_dispatch"
assert bot.record[2] == "on_teams_messaging_extension_submit_action"
async def test_on_teams_messaging_extension_fetch_task(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/fetchTask",
value={
"data": {"key": "value"},
"context": {"theme": "dark"},
"commandId": "test_command",
"commandContext": "command_context_test",
"botMessagePreviewAction": "message_action",
"botActivityPreview": [{"id": "123"}],
"messagePayload": {"id": "abc123"},
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_fetch_task"
async def test_on_teams_messaging_extension_configuration_query_settings_url(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/querySettingUrl",
value={
"commandId": "test_command",
"parameters": [],
"messagingExtensionQueryOptions": {"skip": 1, "count": 1},
"state": "state_string",
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert (
bot.record[1]
== "on_teams_messaging_extension_configuration_query_settings_url"
)
async def test_on_teams_messaging_extension_configuration_setting(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/setting",
value={"key": "value"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_configuration_setting"
async def test_on_teams_messaging_extension_card_button_clicked(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="composeExtension/onCardButtonClicked",
value={"key": "value"},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_messaging_extension_card_button_clicked"
async def test_on_teams_task_module_fetch(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="task/fetch",
value={
"data": {"key": "value"},
"context": TaskModuleRequestContext().serialize(),
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_task_module_fetch"
async def test_on_teams_task_module_fetch_none_as_empty(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="task/fetch",
value={
"data": {"key": "value"},
"context": "",
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_task_module_fetch"
async def test_on_teams_task_module_submit(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="task/submit",
value={
"data": {"key": "value"},
"context": TaskModuleRequestContext().serialize(),
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_task_module_submit"
async def test_on_teams_tab_fetch(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="tab/fetch",
value={
"data": {"key": "value"},
"context": TabContext().serialize(),
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_tab_fetch"
async def test_on_teams_tab_submit(self):
# Arrange
activity = Activity(
type=ActivityTypes.invoke,
name="tab/submit",
value={
"data": {"key": "value"},
"context": TabContext().serialize(),
},
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
# Assert
assert len(bot.record) == 2
assert bot.record[0] == "on_invoke_activity"
assert bot.record[1] == "on_teams_tab_submit"
async def test_on_end_of_conversation_activity(self):
activity = Activity(type=ActivityTypes.end_of_conversation)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
assert len(bot.record) == 1
assert bot.record[0] == "on_end_of_conversation_activity"
async def test_typing_activity(self):
activity = Activity(type=ActivityTypes.typing)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
assert len(bot.record) == 1
assert bot.record[0] == "on_typing_activity"
async def test_on_teams_meeting_start_event(self):
activity = Activity(
type=ActivityTypes.event,
name="application/vnd.microsoft.meetingStart",
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
assert len(bot.record) == 2
assert bot.record[0] == "on_event_activity"
assert bot.record[1] == "on_teams_meeting_start_event"
async def test_on_teams_meeting_end_event(self):
activity = Activity(
type=ActivityTypes.event,
name="application/vnd.microsoft.meetingEnd",
channel_id=Channels.ms_teams,
)
turn_context = TurnContext(SimpleAdapter(), activity)
# Act
bot = TestingTeamsActivityHandler()
await bot.on_turn(turn_context)
assert len(bot.record) == 2
assert bot.record[0] == "on_event_activity"
assert bot.record[1] == "on_teams_meeting_end_event"
|
botbuilder-python/libraries/botbuilder-core/tests/teams/test_teams_activity_handler.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/tests/teams/test_teams_activity_handler.py",
"repo_id": "botbuilder-python",
"token_count": 17868
}
| 400 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.schema import (
Activity,
ActivityTypes,
ChannelAccount,
ConversationAccount,
)
class TestMessage:
@staticmethod
def message(id: str = "1234") -> Activity: # pylint: disable=invalid-name
return Activity(
type=ActivityTypes.message,
id=id,
text="test",
from_property=ChannelAccount(id="user", name="User Name"),
recipient=ChannelAccount(id="bot", name="Bot Name"),
conversation=ConversationAccount(id="convo", name="Convo Name"),
channel_id="UnitTest",
service_url="https://example.org",
)
|
botbuilder-python/libraries/botbuilder-core/tests/test_message.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-core/tests/test_message.py",
"repo_id": "botbuilder-python",
"token_count": 299
}
| 401 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.core import TurnContext
from botframework.connector import Channels
class Channel:
"""
Methods for determining channel-specific functionality.
"""
@staticmethod
def supports_suggested_actions(channel_id: str, button_cnt: int = 100) -> bool:
"""Determine if a number of Suggested Actions are supported by a Channel.
Args:
            channel_id (str): The Channel to check if Suggested Actions are supported in.
button_cnt (int, optional): Defaults to 100. The number of Suggested Actions to check for the Channel.
Returns:
bool: True if the Channel supports the button_cnt total Suggested Actions, False if the Channel does not
support that number of Suggested Actions.
"""
max_actions = {
# https://developers.facebook.com/docs/messenger-platform/send-messages/quick-replies
Channels.facebook: 10,
Channels.skype: 10,
# https://developers.line.biz/en/reference/messaging-api/#items-object
Channels.line: 13,
# https://dev.kik.com/#/docs/messaging#text-response-object
Channels.kik: 20,
Channels.telegram: 100,
Channels.emulator: 100,
Channels.direct_line: 100,
Channels.webchat: 100,
}
return (
button_cnt <= max_actions[channel_id]
if channel_id in max_actions
else False
)
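    # Illustrative checks (a hedged sketch, not part of the original module) derived
    # from the limits above:
    #   Channel.supports_suggested_actions(Channels.facebook, 5)    # True, limit is 10
    #   Channel.supports_suggested_actions(Channels.facebook, 11)   # False
    #   Channel.supports_suggested_actions("unknown-channel", 1)    # False, unknown channels default to False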
@staticmethod
def supports_card_actions(channel_id: str, button_cnt: int = 100) -> bool:
"""Determine if a number of Card Actions are supported by a Channel.
Args:
channel_id (str): The Channel to check if the Card Actions are supported in.
button_cnt (int, optional): Defaults to 100. The number of Card Actions to check for the Channel.
Returns:
bool: True if the Channel supports the button_cnt total Card Actions, False if the Channel does not support
that number of Card Actions.
"""
max_actions = {
Channels.facebook: 3,
Channels.skype: 3,
Channels.ms_teams: 3,
Channels.line: 99,
Channels.slack: 100,
Channels.telegram: 100,
Channels.emulator: 100,
Channels.direct_line: 100,
Channels.webchat: 100,
}
return (
button_cnt <= max_actions[channel_id]
if channel_id in max_actions
else False
)
@staticmethod
def has_message_feed(_: str) -> bool:
"""Determine if a Channel has a Message Feed.
Args:
channel_id (str): The Channel to check for Message Feed.
Returns:
bool: True if the Channel has a Message Feed, False if it does not.
"""
return True
@staticmethod
def max_action_title_length( # pylint: disable=unused-argument
channel_id: str,
) -> int:
"""Maximum length allowed for Action Titles.
Args:
channel_id (str): The Channel to determine Maximum Action Title Length.
Returns:
int: The total number of characters allowed for an Action Title on a specific Channel.
"""
return 20
@staticmethod
def get_channel_id(turn_context: TurnContext) -> str:
"""Get the Channel Id from the current Activity on the Turn Context.
Args:
turn_context (TurnContext): The Turn Context to retrieve the Activity's Channel Id from.
Returns:
str: The Channel Id from the Turn Context's Activity.
"""
if turn_context.activity.channel_id is None:
return ""
return turn_context.activity.channel_id
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/choices/channel.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/choices/channel.py",
"repo_id": "botbuilder-python",
"token_count": 1635
}
| 402 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from abc import ABC, abstractmethod
from botbuilder.core import TurnContext, NullTelemetryClient, BotTelemetryClient
from .dialog_reason import DialogReason
from .dialog_event import DialogEvent
from .dialog_turn_status import DialogTurnStatus
from .dialog_turn_result import DialogTurnResult
from .dialog_instance import DialogInstance
class Dialog(ABC):
end_of_turn = DialogTurnResult(DialogTurnStatus.Waiting)
def __init__(self, dialog_id: str):
if dialog_id is None or not dialog_id.strip():
raise TypeError("Dialog(): dialogId cannot be None.")
self._telemetry_client = NullTelemetryClient()
self._id = dialog_id
@property
def id(self) -> str: # pylint: disable=invalid-name
return self._id
@property
def telemetry_client(self) -> BotTelemetryClient:
"""
Gets the telemetry client for logging events.
"""
return self._telemetry_client
@telemetry_client.setter
def telemetry_client(self, value: BotTelemetryClient) -> None:
"""
Sets the telemetry client for logging events.
"""
if value is None:
self._telemetry_client = NullTelemetryClient()
else:
self._telemetry_client = value
@abstractmethod
async def begin_dialog(
self, dialog_context: "DialogContext", options: object = None
):
"""
Method called when a new dialog has been pushed onto the stack and is being activated.
:param dialog_context: The dialog context for the current turn of conversation.
:param options: (Optional) additional argument(s) to pass to the dialog being started.
"""
raise NotImplementedError()
async def continue_dialog(self, dialog_context: "DialogContext"):
"""
Method called when an instance of the dialog is the "current" dialog and the
user replies with a new activity. The dialog will generally continue to receive the user's
replies until it calls either `end_dialog()` or `begin_dialog()`.
If this method is NOT implemented then the dialog will automatically be ended when the user replies.
:param dialog_context: The dialog context for the current turn of conversation.
:return:
"""
# By default just end the current dialog.
return await dialog_context.end_dialog(None)
async def resume_dialog( # pylint: disable=unused-argument
self, dialog_context: "DialogContext", reason: DialogReason, result: object
):
"""
Method called when an instance of the dialog is being returned to from another
dialog that was started by the current instance using `begin_dialog()`.
If this method is NOT implemented then the dialog will be automatically ended with a call
to `end_dialog()`. Any result passed from the called dialog will be passed
to the current dialog's parent.
:param dialog_context: The dialog context for the current turn of conversation.
:param reason: Reason why the dialog resumed.
:param result: (Optional) value returned from the dialog that was called. The type of the value returned is
dependent on the dialog that was called.
:return:
"""
# By default just end the current dialog and return result to parent.
return await dialog_context.end_dialog(result)
# TODO: instance is DialogInstance
async def reprompt_dialog( # pylint: disable=unused-argument
self, context: TurnContext, instance: DialogInstance
):
"""
:param context:
:param instance:
:return:
"""
# No-op by default
return
# TODO: instance is DialogInstance
async def end_dialog( # pylint: disable=unused-argument
self, context: TurnContext, instance: DialogInstance, reason: DialogReason
):
"""
:param context:
:param instance:
:param reason:
:return:
"""
# No-op by default
return
def get_version(self) -> str:
return self.id
async def on_dialog_event(
self, dialog_context: "DialogContext", dialog_event: DialogEvent
) -> bool:
"""
        Called when an event has been raised, using `DialogContext.emit_event()`, by either the current dialog or a
dialog that the current dialog started.
:param dialog_context: The dialog context for the current turn of conversation.
:param dialog_event: The event being raised.
:return: True if the event is handled by the current dialog and bubbling should stop.
"""
# Before bubble
handled = await self._on_pre_bubble_event(dialog_context, dialog_event)
# Bubble as needed
if (not handled) and dialog_event.bubble and dialog_context.parent:
handled = await dialog_context.parent.emit(
dialog_event.name, dialog_event.value, True, False
)
# Post bubble
if not handled:
handled = await self._on_post_bubble_event(dialog_context, dialog_event)
return handled
async def _on_pre_bubble_event( # pylint: disable=unused-argument
self, dialog_context: "DialogContext", dialog_event: DialogEvent
) -> bool:
"""
Called before an event is bubbled to its parent.
This is a good place to perform interception of an event as returning `true` will prevent
        any further bubbling of the event to the dialog's parents and will also prevent any child
dialogs from performing their default processing.
:param dialog_context: The dialog context for the current turn of conversation.
:param dialog_event: The event being raised.
:return: Whether the event is handled by the current dialog and further processing should stop.
"""
return False
async def _on_post_bubble_event( # pylint: disable=unused-argument
self, dialog_context: "DialogContext", dialog_event: DialogEvent
) -> bool:
"""
Called after an event was bubbled to all parents and wasn't handled.
This is a good place to perform default processing logic for an event. Returning `true` will
prevent any processing of the event by child dialogs.
:param dialog_context: The dialog context for the current turn of conversation.
:param dialog_event: The event being raised.
:return: Whether the event is handled by the current dialog and further processing should stop.
"""
return False
def _on_compute_id(self) -> str:
"""
        Computes a unique ID for a dialog.
        :return: A unique ID for a dialog
"""
return self.__class__.__name__
def _register_source_location(
self, path: str, line_number: int
): # pylint: disable=unused-argument
"""
Registers a SourceRange in the provided location.
:param path: The path to the source file.
:param line_number: The line number where the source will be located on the file.
:return:
"""
if path:
            # This will be added when debugging support is ported.
# DebugSupport.source_map.add(self, SourceRange(
# path = path,
# start_point = SourcePoint(line_index = line_number, char_index = 0 ),
# end_point = SourcePoint(line_index = line_number + 1, char_index = 0 ),
# )
return
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/dialog.py",
"repo_id": "botbuilder-python",
"token_count": 2800
}
| 403 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from abc import ABC, abstractmethod
from typing import Iterable
from botbuilder.dialogs.memory.scopes import MemoryScope
class ComponentMemoryScopesBase(ABC):
@abstractmethod
def get_memory_scopes(self) -> Iterable[MemoryScope]:
raise NotImplementedError()
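# A minimal implementation sketch (hedged): the concrete scope classes named below are
# illustrative assumptions and are not mandated by this interface.
#
#   class MyComponentMemoryScopes(ComponentMemoryScopesBase):
#       def get_memory_scopes(self) -> Iterable[MemoryScope]:
#           return [ConversationMemoryScope(), UserMemoryScope()]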
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/component_memory_scopes_base.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/component_memory_scopes_base.py",
"repo_id": "botbuilder-python",
"token_count": 108
}
| 404 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from collections import namedtuple
from botbuilder.dialogs.memory import scope_path
from .memory_scope import MemoryScope
class ClassMemoryScope(MemoryScope):
def __init__(self):
        super().__init__(scope_path.DIALOG_CLASS, include_in_snapshot=False)
def get_memory(self, dialog_context: "DialogContext") -> object:
if not dialog_context:
            raise TypeError("Expecting: DialogContext, but received None")
# if active dialog is a container dialog then "dialogclass" binds to it.
if dialog_context.active_dialog:
dialog = dialog_context.find_dialog_sync(dialog_context.active_dialog.id)
if dialog:
return ClassMemoryScope._bind_to_dialog_context(dialog, dialog_context)
return None
def set_memory(self, dialog_context: "DialogContext", memory: object):
raise Exception(
f"{self.__class__.__name__}.set_memory not supported (read only)"
)
@staticmethod
def _bind_to_dialog_context(obj, dialog_context: "DialogContext") -> object:
clone = {}
for prop in dir(obj):
# don't process double underscore attributes
if prop[:1] != "_":
prop_value = getattr(obj, prop)
if not callable(prop_value):
                    # bind expression properties and nested objects; plain values are copied as-is
if hasattr(prop_value, "try_get_value"):
clone[prop] = prop_value.try_get_value(dialog_context.state)
elif hasattr(prop_value, "__dict__") and not isinstance(
prop_value, type(prop_value)
):
clone[prop] = ClassMemoryScope._bind_to_dialog_context(
prop_value, dialog_context
)
else:
clone[prop] = prop_value
if clone:
ReadOnlyObject = namedtuple( # pylint: disable=invalid-name
"ReadOnlyObject", clone
)
return ReadOnlyObject(**clone)
return None
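    # Worked example (a hedged sketch): for an active dialog exposing an expression-like
    # property `title` (anything with try_get_value), get_memory() returns a read-only
    # namedtuple such as ReadOnlyObject(title="resolved title"), with the expression
    # evaluated against dialog_context.state; plain attributes are copied through unchanged.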
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/class_memory_scope.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/memory/scopes/class_memory_scope.py",
"repo_id": "botbuilder-python",
"token_count": 1007
}
| 405 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Callable, Dict, List
from botbuilder.core import TurnContext
from botbuilder.dialogs.choices import (
Choice,
ChoiceFactoryOptions,
ChoiceRecognizers,
FindChoicesOptions,
ListStyle,
)
from botbuilder.schema import Activity, ActivityTypes
from .prompt import Prompt
from .prompt_culture_models import PromptCultureModels
from .prompt_options import PromptOptions
from .prompt_validator_context import PromptValidatorContext
from .prompt_recognizer_result import PromptRecognizerResult
class ChoicePrompt(Prompt):
"""
Prompts a user to select from a list of choices.
By default the prompt will return to the calling dialog a `FoundChoice` object containing the choice that
was selected.
"""
_default_choice_options: Dict[str, ChoiceFactoryOptions] = {
c.locale: ChoiceFactoryOptions(
inline_separator=c.separator,
inline_or=c.inline_or_more,
inline_or_more=c.inline_or_more,
include_numbers=True,
)
for c in PromptCultureModels.get_supported_cultures()
}
def __init__(
self,
dialog_id: str,
validator: Callable[[PromptValidatorContext], bool] = None,
default_locale: str = None,
choice_defaults: Dict[str, ChoiceFactoryOptions] = None,
):
"""
:param dialog_id: Unique ID of the dialog within its parent `DialogSet`.
:param validator: (Optional) validator that will be called each time the user responds to the prompt.
If the validator replies with a message no additional retry prompt will be sent.
:param default_locale: (Optional) locale to use if `dc.context.activity.locale` not specified.
Defaults to a value of `en-us`.
:param choice_defaults: (Optional) Overrides the dictionary of
Bot Framework SDK-supported _default_choice_options.
As type Dict[str, ChoiceFactoryOptions], the key is a string of the locale, such as "en-us".
        * Must be passed in to each ChoicePrompt that needs the custom choice defaults.
"""
super().__init__(dialog_id, validator)
self.style = ListStyle.auto
self.default_locale = default_locale
self.choice_options: ChoiceFactoryOptions = None
self.recognizer_options: FindChoicesOptions = None
if choice_defaults is not None:
self._default_choice_options = choice_defaults
async def on_prompt(
self,
turn_context: TurnContext,
state: Dict[str, object],
options: PromptOptions,
is_retry: bool,
):
if not turn_context:
raise TypeError("ChoicePrompt.on_prompt(): turn_context cannot be None.")
if not options:
raise TypeError("ChoicePrompt.on_prompt(): options cannot be None.")
# Determine culture
culture = self._determine_culture(turn_context.activity)
# Format prompt to send
choices: List[Choice] = options.choices if options.choices else []
channel_id: str = turn_context.activity.channel_id
choice_options: ChoiceFactoryOptions = (
self.choice_options
if self.choice_options
else self._default_choice_options[culture]
)
choice_style = (
0 if options.style == 0 else options.style if options.style else self.style
)
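        # A style of 0 (ListStyle.none) is honored explicitly; any other truthy
        # options.style takes precedence, and the prompt's configured style is the fallback.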
if is_retry and options.retry_prompt is not None:
prompt = self.append_choices(
options.retry_prompt, channel_id, choices, choice_style, choice_options
)
else:
prompt = self.append_choices(
options.prompt, channel_id, choices, choice_style, choice_options
)
# Send prompt
await turn_context.send_activity(prompt)
async def on_recognize(
self,
turn_context: TurnContext,
state: Dict[str, object],
options: PromptOptions,
) -> PromptRecognizerResult:
if not turn_context:
raise TypeError("ChoicePrompt.on_recognize(): turn_context cannot be None.")
choices: List[Choice] = options.choices if (options and options.choices) else []
result: PromptRecognizerResult = PromptRecognizerResult()
if turn_context.activity.type == ActivityTypes.message:
activity: Activity = turn_context.activity
utterance: str = activity.text
if not utterance:
return result
opt: FindChoicesOptions = (
self.recognizer_options
if self.recognizer_options
else FindChoicesOptions()
)
opt.locale = self._determine_culture(turn_context.activity, opt)
results = ChoiceRecognizers.recognize_choices(utterance, choices, opt)
if results is not None and results:
result.succeeded = True
result.value = results[0].resolution
return result
def _determine_culture(
self, activity: Activity, opt: FindChoicesOptions = FindChoicesOptions()
) -> str:
culture = (
PromptCultureModels.map_to_nearest_language(activity.locale)
or opt.locale
or self.default_locale
or PromptCultureModels.English.locale
)
if not culture or not self._default_choice_options.get(culture):
culture = PromptCultureModels.English.locale
return culture
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/choice_prompt.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/prompts/choice_prompt.py",
"repo_id": "botbuilder-python",
"token_count": 2279
}
| 406 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from copy import deepcopy
from typing import List
from botframework.connector.token_api.models import TokenExchangeRequest
from botbuilder.schema import (
Activity,
ActivityTypes,
ExpectedReplies,
DeliveryModes,
SignInConstants,
TokenExchangeInvokeRequest,
)
from botbuilder.core import BotAdapter, TurnContext, ExtendedUserTokenProvider
from botbuilder.core.card_factory import ContentTypes
from botbuilder.core.skills import SkillConversationIdFactoryOptions
from botbuilder.dialogs import (
Dialog,
DialogContext,
DialogEvents,
DialogReason,
DialogInstance,
)
from .begin_skill_dialog_options import BeginSkillDialogOptions
from .skill_dialog_options import SkillDialogOptions
class SkillDialog(Dialog):
SKILLCONVERSATIONIDSTATEKEY = (
"Microsoft.Bot.Builder.Dialogs.SkillDialog.SkillConversationId"
)
def __init__(self, dialog_options: SkillDialogOptions, dialog_id: str):
super().__init__(dialog_id)
if not dialog_options:
raise TypeError("SkillDialog.__init__(): dialog_options cannot be None.")
self.dialog_options = dialog_options
self._deliver_mode_state_key = "deliverymode"
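    # A hedged construction sketch: the keyword names mirror the attributes read from
    # dialog_options below; verify the exact SkillDialogOptions signature before use.
    #   options = SkillDialogOptions(
    #       bot_id=bot_app_id,
    #       skill_client=skill_http_client,
    #       skill_host_endpoint="http://localhost:3978/api/skills",
    #       skill=skill_info,
    #       conversation_state=conversation_state,
    #       conversation_id_factory=conversation_id_factory,
    #       connection_name="oauth-connection",
    #   )
    #   dialogs.add(SkillDialog(options, dialog_id="SkillDialog"))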
async def begin_dialog(self, dialog_context: DialogContext, options: object = None):
"""
Method called when a new dialog has been pushed onto the stack and is being activated.
:param dialog_context: The dialog context for the current turn of conversation.
:param options: (Optional) additional argument(s) to pass to the dialog being started.
"""
dialog_args = self._validate_begin_dialog_args(options)
# Create deep clone of the original activity to avoid altering it before forwarding it.
skill_activity: Activity = deepcopy(dialog_args.activity)
# Apply conversation reference and common properties from incoming activity before sending.
TurnContext.apply_conversation_reference(
skill_activity,
TurnContext.get_conversation_reference(dialog_context.context.activity),
is_incoming=True,
)
# Store delivery mode in dialog state for later use.
dialog_context.active_dialog.state[
self._deliver_mode_state_key
] = dialog_args.activity.delivery_mode
# Create the conversationId and store it in the dialog context state so we can use it later
skill_conversation_id = await self._create_skill_conversation_id(
dialog_context.context, dialog_context.context.activity
)
dialog_context.active_dialog.state[
SkillDialog.SKILLCONVERSATIONIDSTATEKEY
] = skill_conversation_id
# Send the activity to the skill.
eoc_activity = await self._send_to_skill(
dialog_context.context, skill_activity, skill_conversation_id
)
if eoc_activity:
return await dialog_context.end_dialog(eoc_activity.value)
return self.end_of_turn
async def continue_dialog(self, dialog_context: DialogContext):
if not self._on_validate_activity(dialog_context.context.activity):
return self.end_of_turn
        # Handle EndOfConversation from the skill (this will be sent to this dialog by the SkillHandler if
# received from the Skill)
if dialog_context.context.activity.type == ActivityTypes.end_of_conversation:
return await dialog_context.end_dialog(
dialog_context.context.activity.value
)
# Create deep clone of the original activity to avoid altering it before forwarding it.
skill_activity = deepcopy(dialog_context.context.activity)
skill_activity.delivery_mode = dialog_context.active_dialog.state[
self._deliver_mode_state_key
]
# Just forward to the remote skill
skill_conversation_id = dialog_context.active_dialog.state[
SkillDialog.SKILLCONVERSATIONIDSTATEKEY
]
eoc_activity = await self._send_to_skill(
dialog_context.context, skill_activity, skill_conversation_id
)
if eoc_activity:
return await dialog_context.end_dialog(eoc_activity.value)
return self.end_of_turn
async def reprompt_dialog( # pylint: disable=unused-argument
self, context: TurnContext, instance: DialogInstance
):
# Create and send an event to the skill so it can resume the dialog.
reprompt_event = Activity(
type=ActivityTypes.event, name=DialogEvents.reprompt_dialog
)
# Apply conversation reference and common properties from incoming activity before sending.
TurnContext.apply_conversation_reference(
reprompt_event,
TurnContext.get_conversation_reference(context.activity),
is_incoming=True,
)
        # connection Name is not applicable for a RePrompt, as we don't expect an OAuthCard in response.
skill_conversation_id = instance.state[SkillDialog.SKILLCONVERSATIONIDSTATEKEY]
await self._send_to_skill(context, reprompt_event, skill_conversation_id)
async def resume_dialog( # pylint: disable=unused-argument
self, dialog_context: "DialogContext", reason: DialogReason, result: object
):
await self.reprompt_dialog(dialog_context.context, dialog_context.active_dialog)
return self.end_of_turn
async def end_dialog(
self, context: TurnContext, instance: DialogInstance, reason: DialogReason
):
        # Send an end of conversation to the skill if the dialog has been cancelled.
if reason in (DialogReason.CancelCalled, DialogReason.ReplaceCalled):
activity = Activity(type=ActivityTypes.end_of_conversation)
# Apply conversation reference and common properties from incoming activity before sending.
TurnContext.apply_conversation_reference(
activity,
TurnContext.get_conversation_reference(context.activity),
is_incoming=True,
)
activity.channel_data = context.activity.channel_data
activity.additional_properties = context.activity.additional_properties
            # connection Name is not applicable for an EndDialog, as we don't expect an OAuthCard in response.
skill_conversation_id = instance.state[
SkillDialog.SKILLCONVERSATIONIDSTATEKEY
]
await self._send_to_skill(context, activity, skill_conversation_id)
await super().end_dialog(context, instance, reason)
def _validate_begin_dialog_args(self, options: object) -> BeginSkillDialogOptions:
if not options:
raise TypeError("options cannot be None.")
dialog_args = BeginSkillDialogOptions.from_object(options)
if not dialog_args:
raise TypeError(
"SkillDialog: options object not valid as BeginSkillDialogOptions."
)
if not dialog_args.activity:
raise TypeError(
"SkillDialog: activity object in options as BeginSkillDialogOptions cannot be None."
)
return dialog_args
def _on_validate_activity(
self, activity: Activity # pylint: disable=unused-argument
) -> bool:
"""
Validates the activity sent during continue_dialog.
Override this method to implement a custom validator for the activity being sent during continue_dialog.
This method can be used to ignore activities of a certain type if needed.
If this method returns false, the dialog will end the turn without processing the activity.
"""
return True
async def _send_to_skill(
self, context: TurnContext, activity: Activity, skill_conversation_id: str
) -> Activity:
if activity.type == ActivityTypes.invoke:
# Force ExpectReplies for invoke activities so we can get the replies right away and send
# them back to the channel if needed. This makes sure that the dialog will receive the Invoke
# response from the skill and any other activities sent, including EoC.
activity.delivery_mode = DeliveryModes.expect_replies
# Always save state before forwarding
# (the dialog stack won't get updated with the skillDialog and things won't work if you don't)
await self.dialog_options.conversation_state.save_changes(context, True)
skill_info = self.dialog_options.skill
response = await self.dialog_options.skill_client.post_activity(
self.dialog_options.bot_id,
skill_info.app_id,
skill_info.skill_endpoint,
self.dialog_options.skill_host_endpoint,
skill_conversation_id,
activity,
)
# Inspect the skill response status
if not 200 <= response.status <= 299:
raise Exception(
f'Error invoking the skill id: "{skill_info.id}" at "{skill_info.skill_endpoint}"'
f" (status is {response.status}). \r\n {response.body}"
)
eoc_activity: Activity = None
if activity.delivery_mode == DeliveryModes.expect_replies and response.body:
# Process replies in the response.Body.
response.body: List[Activity]
response.body = ExpectedReplies().deserialize(response.body).activities
# Track sent invoke responses, so more than one is not sent.
sent_invoke_response = False
for from_skill_activity in response.body:
if from_skill_activity.type == ActivityTypes.end_of_conversation:
# Capture the EndOfConversation activity if it was sent from skill
eoc_activity = from_skill_activity
# The conversation has ended, so cleanup the conversation id
await self.dialog_options.conversation_id_factory.delete_conversation_reference(
skill_conversation_id
)
elif not sent_invoke_response and await self._intercept_oauth_cards(
context, from_skill_activity, self.dialog_options.connection_name
):
# Token exchange succeeded, so no oauthcard needs to be shown to the user
sent_invoke_response = True
else:
# If an invoke response has already been sent we should ignore future invoke responses as this
# represents a bug in the skill.
if from_skill_activity.type == ActivityTypes.invoke_response:
if sent_invoke_response:
continue
sent_invoke_response = True
# Send the response back to the channel.
await context.send_activity(from_skill_activity)
return eoc_activity
async def _create_skill_conversation_id(
self, context: TurnContext, activity: Activity
) -> str:
# Create a conversationId to interact with the skill and send the activity
conversation_id_factory_options = SkillConversationIdFactoryOptions(
from_bot_oauth_scope=context.turn_state.get(BotAdapter.BOT_OAUTH_SCOPE_KEY),
from_bot_id=self.dialog_options.bot_id,
activity=activity,
bot_framework_skill=self.dialog_options.skill,
)
skill_conversation_id = await self.dialog_options.conversation_id_factory.create_skill_conversation_id(
conversation_id_factory_options
)
return skill_conversation_id
async def _intercept_oauth_cards(
self, context: TurnContext, activity: Activity, connection_name: str
):
"""
        Tells us whether we should intercept the OAuthCard message.
"""
if not connection_name or not isinstance(
context.adapter, ExtendedUserTokenProvider
):
            # The adapter may choose not to support token exchange, in which case we fall back to
            # showing an oauth card to the user.
return False
        # Default to None so activities without an OAuthCard attachment don't raise StopIteration.
        oauth_card_attachment = next(
            (
                attachment
                for attachment in activity.attachments or []
                if attachment.content_type == ContentTypes.oauth_card
            ),
            None,
        )
if oauth_card_attachment:
oauth_card = oauth_card_attachment.content
if (
oauth_card
and oauth_card.token_exchange_resource
and oauth_card.token_exchange_resource.uri
):
try:
result = await context.adapter.exchange_token(
turn_context=context,
connection_name=connection_name,
user_id=context.activity.from_property.id,
exchange_request=TokenExchangeRequest(
uri=oauth_card.token_exchange_resource.uri
),
)
if result and result.token:
# If token above is null, then SSO has failed and hence we return false.
# If not, send an invoke to the skill with the token.
return await self._send_token_exchange_invoke_to_skill(
activity,
oauth_card.token_exchange_resource.id,
oauth_card.connection_name,
result.token,
)
                except Exception:
# Failures in token exchange are not fatal. They simply mean that the user needs
# to be shown the OAuth card.
return False
return False
async def _send_token_exchange_invoke_to_skill(
self,
incoming_activity: Activity,
request_id: str,
connection_name: str,
token: str,
):
activity = incoming_activity.create_reply()
activity.type = ActivityTypes.invoke
activity.name = SignInConstants.token_exchange_operation_name
activity.value = TokenExchangeInvokeRequest(
id=request_id,
token=token,
connection_name=connection_name,
)
# route the activity to the skill
skill_info = self.dialog_options.skill
response = await self.dialog_options.skill_client.post_activity(
self.dialog_options.bot_id,
skill_info.app_id,
skill_info.skill_endpoint,
self.dialog_options.skill_host_endpoint,
incoming_activity.conversation.id,
activity,
)
# Check response status: true if success, false if failure
return response.is_successful_status_code()
|
botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/skills/skill_dialog.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/botbuilder/dialogs/skills/skill_dialog.py",
"repo_id": "botbuilder-python",
"token_count": 6260
}
| 407 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import unittest
import aiounittest
from botbuilder.dialogs.prompts import (
ActivityPrompt,
PromptOptions,
PromptValidatorContext,
)
from botbuilder.schema import Activity, ActivityTypes
from botbuilder.core import (
ConversationState,
MemoryStorage,
TurnContext,
MessageFactory,
)
from botbuilder.core.adapters import TestAdapter
from botbuilder.dialogs import DialogSet, DialogTurnStatus, DialogReason
async def validator(prompt_context: PromptValidatorContext):
tester = unittest.TestCase()
tester.assertTrue(prompt_context.attempt_count > 0)
activity = prompt_context.recognized.value
if activity.type == ActivityTypes.event:
if int(activity.value) == 2:
prompt_context.recognized.value = MessageFactory.text(str(activity.value))
return True
else:
await prompt_context.context.send_activity(
"Please send an 'event'-type Activity with a value of 2."
)
return False
class SimpleActivityPrompt(ActivityPrompt):
pass
class ActivityPromptTests(aiounittest.AsyncTestCase):
def test_activity_prompt_with_empty_id_should_fail(self):
empty_id = ""
with self.assertRaises(TypeError):
SimpleActivityPrompt(empty_id, validator)
def test_activity_prompt_with_none_id_should_fail(self):
none_id = None
with self.assertRaises(TypeError):
SimpleActivityPrompt(none_id, validator)
def test_activity_prompt_with_none_validator_should_fail(self):
none_validator = None
with self.assertRaises(TypeError):
SimpleActivityPrompt("EventActivityPrompt", none_validator)
async def test_basic_activity_prompt(self):
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
)
)
await dialog_context.prompt("EventActivityPrompt", options)
elif results.status == DialogTurnStatus.Complete:
await turn_context.send_activity(results.result)
await convo_state.save_changes(turn_context)
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
dialogs.add(SimpleActivityPrompt("EventActivityPrompt", validator))
event_activity = Activity(type=ActivityTypes.event, value=2)
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("please send an event.")
step3 = await step2.send(event_activity)
await step3.assert_reply("2")
async def test_retry_activity_prompt(self):
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
)
)
await dialog_context.prompt("EventActivityPrompt", options)
elif results.status == DialogTurnStatus.Complete:
await turn_context.send_activity(results.result)
await convo_state.save_changes(turn_context)
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
dialogs.add(SimpleActivityPrompt("EventActivityPrompt", validator))
event_activity = Activity(type=ActivityTypes.event, value=2)
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("please send an event.")
step3 = await step2.send("hello again")
step4 = await step3.assert_reply(
"Please send an 'event'-type Activity with a value of 2."
)
step5 = await step4.send(event_activity)
await step5.assert_reply("2")
async def test_activity_prompt_should_return_dialog_end_if_validation_failed(self):
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
),
retry_prompt=Activity(
type=ActivityTypes.message, text="event not received."
),
)
await dialog_context.prompt("EventActivityPrompt", options)
elif results.status == DialogTurnStatus.Complete:
await turn_context.send_activity(results.result)
await convo_state.save_changes(turn_context)
async def aux_validator(prompt_context: PromptValidatorContext):
assert prompt_context, "Validator missing prompt_context"
return False
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
dialogs.add(SimpleActivityPrompt("EventActivityPrompt", aux_validator))
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("please send an event.")
step3 = await step2.send("test")
await step3.assert_reply("event not received.")
async def test_activity_prompt_resume_dialog_should_return_dialog_end(self):
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
)
)
await dialog_context.prompt("EventActivityPrompt", options)
second_results = await event_prompt.resume_dialog(
dialog_context, DialogReason.NextCalled
)
assert (
second_results.status == DialogTurnStatus.Waiting
), "resume_dialog did not returned Dialog.EndOfTurn"
await convo_state.save_changes(turn_context)
async def aux_validator(prompt_context: PromptValidatorContext):
assert prompt_context, "Validator missing prompt_context"
return False
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
event_prompt = SimpleActivityPrompt("EventActivityPrompt", aux_validator)
dialogs.add(event_prompt)
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("please send an event.")
await step2.assert_reply("please send an event.")
async def test_activity_prompt_onerror_should_return_dialogcontext(self):
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
dialogs.add(SimpleActivityPrompt("EventActivityPrompt", validator))
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
)
)
try:
await dialog_context.prompt("EventActivityPrompt", options)
await dialog_context.prompt("Non existent id", options)
except Exception as err:
self.assertIsNotNone(
err.data["DialogContext"] # pylint: disable=no-member
)
self.assertEqual(
err.data["DialogContext"][ # pylint: disable=no-member
"active_dialog"
],
"EventActivityPrompt",
)
else:
raise Exception("Should have thrown an error.")
elif results.status == DialogTurnStatus.Complete:
await turn_context.send_activity(results.result)
await convo_state.save_changes(turn_context)
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
await adapter.send("hello")
async def test_activity_replace_dialog_onerror_should_return_dialogcontext(self):
# Create ConversationState with MemoryStorage and register the state as middleware.
convo_state = ConversationState(MemoryStorage())
# Create a DialogState property, DialogSet and AttachmentPrompt.
dialog_state = convo_state.create_property("dialog_state")
dialogs = DialogSet(dialog_state)
dialogs.add(SimpleActivityPrompt("EventActivityPrompt", validator))
async def exec_test(turn_context: TurnContext):
dialog_context = await dialogs.create_context(turn_context)
results = await dialog_context.continue_dialog()
if results.status == DialogTurnStatus.Empty:
options = PromptOptions(
prompt=Activity(
type=ActivityTypes.message, text="please send an event."
)
)
try:
await dialog_context.prompt("EventActivityPrompt", options)
await dialog_context.replace_dialog("Non existent id", options)
except Exception as err:
self.assertIsNotNone(
err.data["DialogContext"] # pylint: disable=no-member
)
else:
raise Exception("Should have thrown an error.")
elif results.status == DialogTurnStatus.Complete:
await turn_context.send_activity(results.result)
await convo_state.save_changes(turn_context)
# Initialize TestAdapter.
adapter = TestAdapter(exec_test)
await adapter.send("hello")
|
botbuilder-python/libraries/botbuilder-dialogs/tests/test_activity_prompt.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-dialogs/tests/test_activity_prompt.py",
"repo_id": "botbuilder-python",
"token_count": 5185
}
| 408 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
from setuptools import setup
VERSION = os.environ["packageVersion"] if "packageVersion" in os.environ else "4.16.0"
REQUIRES = [
"botbuilder-schema==4.16.0",
"botframework-connector==4.16.0",
"botbuilder-core==4.16.0",
"yarl>=1.8.1",
"aiohttp==3.9.3",
]
root = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(root, "botbuilder", "integration", "aiohttp", "about.py")) as f:
package_info = {}
info = f.read()
exec(info, package_info)
with open(os.path.join(root, "README.rst"), encoding="utf-8") as f:
long_description = f.read()
setup(
name=package_info["__title__"],
version=package_info["__version__"],
url=package_info["__uri__"],
author=package_info["__author__"],
description=package_info["__description__"],
keywords=[
"BotBuilderIntegrationAiohttp",
"bots",
"ai",
"botframework",
"botbuilder",
],
long_description=long_description,
long_description_content_type="text/x-rst",
license=package_info["__license__"],
packages=[
"botbuilder.integration.aiohttp",
"botbuilder.integration.aiohttp.skills",
"botbuilder.integration.aiohttp.streaming",
],
install_requires=REQUIRES,
classifiers=[
"Programming Language :: Python :: 3.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 5 - Production/Stable",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
)
|
botbuilder-python/libraries/botbuilder-integration-aiohttp/setup.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-integration-aiohttp/setup.py",
"repo_id": "botbuilder-python",
"token_count": 682
}
| 409 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from enum import Enum
class SignInConstants(str, Enum):
# Name for the signin invoke to verify the 6-digit authentication code as part of sign-in.
verify_state_operation_name = "signin/verifyState"
# Name for signin invoke to perform a token exchange.
token_exchange_operation_name = "signin/tokenExchange"
# The EventActivity name when a token is sent to the bot.
token_response_event_name = "tokens/response"
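# Illustrative usage sketch (not part of the original module): how an invoke handler
# might branch on these constants. The `activity` argument is assumed to be a
# botbuilder Activity-like object exposing a `name` attribute.
def _example_is_sign_in_invoke(activity) -> bool:
    # True for the verify-state and token-exchange sign-in invokes defined above.
    return activity.name in (
        SignInConstants.verify_state_operation_name,
        SignInConstants.token_exchange_operation_name,
    )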
|
botbuilder-python/libraries/botbuilder-schema/botbuilder/schema/_sign_in_enums.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-schema/botbuilder/schema/_sign_in_enums.py",
"repo_id": "botbuilder-python",
"token_count": 161
}
| 410 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""
Base tests that all storage providers should implement in their own tests.
They handle the storage-based assertions internally.
All tests return True when their internal assertions pass, indicating that the code ran to completion.
Therefore, all tests using these static tests should strictly check that the method returns True.
Note: Unlike the other SDKs, which can give StoreItem properties null values, these tests use plain dicts.
Because of this, the StoreItem tests use "e_tag: *" where the tests in the other SDKs do not.
This has also caused us to comment out some parts of these tests where we assert that "e_tag"
is None, for the same reason. A null e_tag should work just like a "*" e_tag when writing,
as far as the storage adapters are concerned, so this shouldn't cause issues.
:Example:
async def test_handle_null_keys_when_reading(self):
await reset()
test_ran = await StorageBaseTests.handle_null_keys_when_reading(get_storage())
assert test_ran
"""
import pytest
from botbuilder.azure import CosmosDbStorage
from botbuilder.core import (
ConversationState,
TurnContext,
MessageFactory,
MemoryStorage,
)
from botbuilder.core.adapters import TestAdapter
from botbuilder.dialogs import (
DialogSet,
DialogTurnStatus,
TextPrompt,
PromptValidatorContext,
WaterfallStepContext,
Dialog,
WaterfallDialog,
PromptOptions,
)
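# Illustrative sketch (not part of the original module): minimal get_storage()/reset()
# helpers of the kind the :Example: in the module docstring assumes. A real provider test
# suite would build and clear its own storage here (for example a CosmosDbStorage pointed
# at a test container) instead of the in-memory stand-in used below.
def _example_get_storage() -> MemoryStorage:
    # Return a fresh storage instance for the test run.
    return MemoryStorage()


async def _example_reset() -> None:
    # Clear provider-side state between tests; nothing to do for in-memory storage.
    return None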
class StorageBaseTests:
# pylint: disable=pointless-string-statement
@staticmethod
async def return_empty_object_when_reading_unknown_key(storage) -> bool:
result = await storage.read(["unknown"])
assert result is not None
assert len(result) == 0
return True
@staticmethod
async def handle_null_keys_when_reading(storage) -> bool:
if isinstance(storage, (CosmosDbStorage, MemoryStorage)):
result = await storage.read(None)
assert len(result.keys()) == 0
# Catch-all
else:
with pytest.raises(Exception) as err:
await storage.read(None)
assert err.value.args[0] == "Keys are required when reading"
return True
@staticmethod
async def handle_null_keys_when_writing(storage) -> bool:
with pytest.raises(Exception) as err:
await storage.write(None)
assert err.value.args[0] == "Changes are required when writing"
return True
@staticmethod
async def does_not_raise_when_writing_no_items(storage) -> bool:
# noinspection PyBroadException
try:
await storage.write([])
except:
pytest.fail("Should not raise")
return True
@staticmethod
async def create_object(storage) -> bool:
store_items = {
"createPoco": {"id": 1},
"createPocoStoreItem": {"id": 2, "e_tag": "*"},
}
await storage.write(store_items)
read_store_items = await storage.read(store_items.keys())
assert store_items["createPoco"]["id"] == read_store_items["createPoco"]["id"]
assert (
store_items["createPocoStoreItem"]["id"]
== read_store_items["createPocoStoreItem"]["id"]
)
# If we decide to validate e_tag integrity again, uncomment this code
# assert read_store_items["createPoco"]["e_tag"] is not None
assert read_store_items["createPocoStoreItem"]["e_tag"] is not None
return True
@staticmethod
async def handle_crazy_keys(storage) -> bool:
key = '!@#$%^&*()_+??><":QASD~`'
store_item = {"id": 1}
store_items = {key: store_item}
await storage.write(store_items)
read_store_items = await storage.read(store_items.keys())
assert read_store_items[key] is not None
assert read_store_items[key]["id"] == 1
return True
@staticmethod
async def update_object(storage) -> bool:
original_store_items = {
"pocoItem": {"id": 1, "count": 1},
"pocoStoreItem": {"id": 1, "count": 1, "e_tag": "*"},
}
# 1st write should work
await storage.write(original_store_items)
loaded_store_items = await storage.read(["pocoItem", "pocoStoreItem"])
update_poco_item = loaded_store_items["pocoItem"]
update_poco_item["e_tag"] = None
update_poco_store_item = loaded_store_items["pocoStoreItem"]
assert update_poco_store_item["e_tag"] is not None
# 2nd write should work
update_poco_item["count"] += 1
update_poco_store_item["count"] += 1
await storage.write(loaded_store_items)
reloaded_store_items = await storage.read(loaded_store_items.keys())
reloaded_update_poco_item = reloaded_store_items["pocoItem"]
reloaded_update_poco_store_item = reloaded_store_items["pocoStoreItem"]
assert reloaded_update_poco_item["count"] == 2
assert reloaded_update_poco_store_item["count"] == 2
# Write with old e_tag should succeed for non-storeItem
update_poco_item["count"] = 123
await storage.write({"pocoItem": update_poco_item})
# Write with old eTag should FAIL for storeItem
update_poco_store_item["count"] = 123
"""
This assert exists in the other SDKs but can't be used in Python currently,
due to using "e_tag: *" above (see the comment near the top of this file for details).
with pytest.raises(Exception) as err:
await storage.write({"pocoStoreItem": update_poco_store_item})
assert err.value is not None
"""
reloaded_store_items2 = await storage.read(["pocoItem", "pocoStoreItem"])
reloaded_poco_item2 = reloaded_store_items2["pocoItem"]
reloaded_poco_item2["e_tag"] = None
reloaded_poco_store_item2 = reloaded_store_items2["pocoStoreItem"]
assert reloaded_poco_item2["count"] == 123
assert reloaded_poco_store_item2["count"] == 2
# write with wildcard etag should work
reloaded_poco_item2["count"] = 100
reloaded_poco_store_item2["count"] = 100
reloaded_poco_store_item2["e_tag"] = "*"
wildcard_etag_dict = {
"pocoItem": reloaded_poco_item2,
"pocoStoreItem": reloaded_poco_store_item2,
}
await storage.write(wildcard_etag_dict)
reloaded_store_items3 = await storage.read(["pocoItem", "pocoStoreItem"])
assert reloaded_store_items3["pocoItem"]["count"] == 100
assert reloaded_store_items3["pocoStoreItem"]["count"] == 100
# Write with empty etag should not work
reloaded_store_items4 = await storage.read(["pocoStoreItem"])
reloaded_store_item4 = reloaded_store_items4["pocoStoreItem"]
assert reloaded_store_item4 is not None
reloaded_store_item4["e_tag"] = ""
dict2 = {"pocoStoreItem": reloaded_store_item4}
with pytest.raises(Exception) as err:
await storage.write(dict2)
assert err.value is not None
final_store_items = await storage.read(["pocoItem", "pocoStoreItem"])
assert final_store_items["pocoItem"]["count"] == 100
assert final_store_items["pocoStoreItem"]["count"] == 100
return True
@staticmethod
async def delete_object(storage) -> bool:
store_items = {"delete1": {"id": 1, "count": 1, "e_tag": "*"}}
await storage.write(store_items)
read_store_items = await storage.read(["delete1"])
assert read_store_items["delete1"]["e_tag"]
assert read_store_items["delete1"]["count"] == 1
await storage.delete(["delete1"])
reloaded_store_items = await storage.read(["delete1"])
assert reloaded_store_items.get("delete1", None) is None
return True
@staticmethod
async def delete_unknown_object(storage) -> bool:
# noinspection PyBroadException
try:
await storage.delete(["unknown_key"])
except:
pytest.fail("Should not raise")
return True
@staticmethod
async def perform_batch_operations(storage) -> bool:
await storage.write(
{
"batch1": {"count": 10},
"batch2": {"count": 20},
"batch3": {"count": 30},
}
)
result = await storage.read(["batch1", "batch2", "batch3"])
assert result.get("batch1", None) is not None
assert result.get("batch2", None) is not None
assert result.get("batch3", None) is not None
assert result["batch1"]["count"] == 10
assert result["batch2"]["count"] == 20
assert result["batch3"]["count"] == 30
"""
If we decide to validate e_tag integrity again, uncomment this code
assert result["batch1"].get("e_tag", None) is not None
assert result["batch2"].get("e_tag", None) is not None
assert result["batch3"].get("e_tag", None) is not None
"""
await storage.delete(["batch1", "batch2", "batch3"])
result = await storage.read(["batch1", "batch2", "batch3"])
assert result.get("batch1", None) is None
assert result.get("batch2", None) is None
assert result.get("batch3", None) is None
return True
@staticmethod
async def proceeds_through_waterfall(storage) -> bool:
convo_state = ConversationState(storage)
dialog_state = convo_state.create_property("dialogState")
dialogs = DialogSet(dialog_state)
async def exec_test(turn_context: TurnContext) -> None:
dialog_context = await dialogs.create_context(turn_context)
await dialog_context.continue_dialog()
if not turn_context.responded:
await dialog_context.begin_dialog(WaterfallDialog.__name__)
await convo_state.save_changes(turn_context)
adapter = TestAdapter(exec_test)
async def prompt_validator(prompt_context: PromptValidatorContext):
result = prompt_context.recognized.value
if len(result) > 3:
succeeded_message = MessageFactory.text(
f"You got it at the {prompt_context.options.number_of_attempts}rd try!"
)
await prompt_context.context.send_activity(succeeded_message)
return True
reply = MessageFactory.text(
f"Please send a name that is longer than 3 characters. {prompt_context.options.number_of_attempts}"
)
await prompt_context.context.send_activity(reply)
return False
async def step_1(step_context: WaterfallStepContext) -> DialogTurnStatus:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.context.send_activity("step1")
return Dialog.end_of_turn
async def step_2(step_context: WaterfallStepContext) -> None:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.prompt(
TextPrompt.__name__,
PromptOptions(prompt=MessageFactory.text("Please type your name")),
)
async def step_3(step_context: WaterfallStepContext) -> DialogTurnStatus:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.context.send_activity("step3")
return Dialog.end_of_turn
steps = [step_1, step_2, step_3]
dialogs.add(WaterfallDialog(WaterfallDialog.__name__, steps))
dialogs.add(TextPrompt(TextPrompt.__name__, prompt_validator))
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("step1")
step3 = await step2.send("hello")
step4 = await step3.assert_reply("Please type your name") # None
step5 = await step4.send("hi")
step6 = await step5.assert_reply(
"Please send a name that is longer than 3 characters. 0"
)
step7 = await step6.send("hi")
step8 = await step7.assert_reply(
"Please send a name that is longer than 3 characters. 1"
)
step9 = await step8.send("hi")
step10 = await step9.assert_reply(
"Please send a name that is longer than 3 characters. 2"
)
step11 = await step10.send("Kyle")
step12 = await step11.assert_reply("You got it at the 3rd try!")
await step12.assert_reply("step3")
return True
|
botbuilder-python/libraries/botbuilder-testing/botbuilder/testing/storage_base_tests.py/0
|
{
"file_path": "botbuilder-python/libraries/botbuilder-testing/botbuilder/testing/storage_base_tests.py",
"repo_id": "botbuilder-python",
"token_count": 5257
}
| 411 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from msrest.pipeline import AsyncPipeline, AsyncHTTPPolicy, SansIOHTTPPolicy
from msrest.universal_http.async_requests import AsyncRequestsHTTPSender as Driver
from msrest.pipeline.async_requests import (
AsyncRequestsCredentialsPolicy,
AsyncPipelineRequestsHTTPSender,
)
from msrest.pipeline.universal import RawDeserializer
from .bot_framework_sdk_client_async import BotFrameworkConnectorConfiguration
class AsyncBfPipeline(AsyncPipeline):
def __init__(self, config: BotFrameworkConnectorConfiguration):
creds = config.credentials
policies = [
config.user_agent_policy, # UserAgent policy
RawDeserializer(), # Deserialize the raw bytes
config.http_logger_policy, # HTTP request/response log
] # type: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]]
if creds:
if isinstance(creds, (AsyncHTTPPolicy, SansIOHTTPPolicy)):
policies.insert(1, creds)
else:
# Assume this is the old credentials class, and then requests. Wrap it.
policies.insert(1, AsyncRequestsCredentialsPolicy(creds))
sender = config.sender or AsyncPipelineRequestsHTTPSender(
config.driver or Driver(config)
)
super().__init__(policies, sender)
|
botbuilder-python/libraries/botframework-connector/botframework/connector/aiohttp_bf_pipeline.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/aiohttp_bf_pipeline.py",
"repo_id": "botbuilder-python",
"token_count": 549
}
| 412 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from logging import Logger
from ..bot_framework_sdk_client_async import BotFrameworkConnectorConfiguration
from ..http_client_factory import HttpClientFactory
from ._government_cloud_bot_framework_authentication import (
_GovernmentCloudBotFrameworkAuthentication,
)
from ._parameterized_bot_framework_authentication import (
_ParameterizedBotFrameworkAuthentication,
)
from ._public_cloud_bot_framework_authentication import (
_PublicCloudBotFrameworkAuthentication,
)
from .authentication_configuration import AuthenticationConfiguration
from .bot_framework_authentication import BotFrameworkAuthentication
from .government_constants import GovernmentConstants
from .password_service_client_credential_factory import (
PasswordServiceClientCredentialFactory,
)
from .service_client_credentials_factory import ServiceClientCredentialsFactory
class BotFrameworkAuthenticationFactory:
@staticmethod
def create(
*,
channel_service: str = None,
validate_authority: bool = False,
to_channel_from_bot_login_url: str = None,
to_channel_from_bot_oauth_scope: str = None,
to_bot_from_channel_token_issuer: str = None,
oauth_url: str = None,
to_bot_from_channel_open_id_metadata_url: str = None,
to_bot_from_emulator_open_id_metadata_url: str = None,
caller_id: str = None,
credential_factory: ServiceClientCredentialsFactory = PasswordServiceClientCredentialFactory(),
auth_configuration: AuthenticationConfiguration = AuthenticationConfiguration(),
http_client_factory: HttpClientFactory = None,
connector_client_configuration: BotFrameworkConnectorConfiguration = None,
logger: Logger = None
) -> BotFrameworkAuthentication:
"""
Creates the appropriate BotFrameworkAuthentication instance.
:param channel_service: The Channel Service.
:param validate_authority: The validate authority value to use.
:param to_channel_from_bot_login_url: The to Channel from bot login url.
:param to_channel_from_bot_oauth_scope: The to Channel from bot oauth scope.
:param to_bot_from_channel_token_issuer: The to bot from Channel Token Issuer.
:param oauth_url: The oAuth url.
:param to_bot_from_channel_open_id_metadata_url: The to bot from Channel Open Id Metadata url.
:param to_bot_from_emulator_open_id_metadata_url: The to bot from Emulator Open Id Metadata url.
:param caller_id: The caller ID to set on authenticated activities.
:param credential_factory: The ServiceClientCredentialsFactory to use to create credentials.
:param auth_configuration: The AuthenticationConfiguration to use.
:param http_client_factory: The HttpClientFactory to use for a skill BotFrameworkClient.
:param connector_client_configuration: Configuration to use custom http pipeline for the connector
:param logger: The Logger to use.
:return: A new BotFrameworkAuthentication instance.
"""
# pylint: disable=too-many-boolean-expressions
if (
to_channel_from_bot_login_url
or to_channel_from_bot_oauth_scope
or to_bot_from_channel_token_issuer
or oauth_url
or to_bot_from_channel_open_id_metadata_url
or to_bot_from_emulator_open_id_metadata_url
or caller_id
):
# If any of the 'parameterized' properties are defined, assume this is a parameterized configuration
return _ParameterizedBotFrameworkAuthentication(
validate_authority,
to_channel_from_bot_login_url,
to_channel_from_bot_oauth_scope,
to_bot_from_channel_token_issuer,
oauth_url,
to_bot_from_channel_open_id_metadata_url,
to_bot_from_emulator_open_id_metadata_url,
caller_id,
credential_factory,
auth_configuration,
http_client_factory,
connector_client_configuration,
logger,
)
# else apply the built in default behavior, which is either the public cloud or the gov cloud
# depending on whether we have a channelService value present
if not channel_service:
return _PublicCloudBotFrameworkAuthentication(
credential_factory,
auth_configuration,
http_client_factory,
connector_client_configuration,
logger,
)
if channel_service == GovernmentConstants.CHANNEL_SERVICE:
return _GovernmentCloudBotFrameworkAuthentication(
credential_factory,
auth_configuration,
http_client_factory,
connector_client_configuration,
logger,
)
# The ChannelService value is used as an indicator of which built-in set of constants to use.
# If it is not recognized, a full configuration is expected.
raise ValueError("The provided channel_service value is not supported.")
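# Illustrative usage sketch (not part of the original module): typical ways a bot host might
# call the factory. Passing no channel_service yields the public-cloud behavior; passing the
# government channel service value selects the government cloud. The app id and password
# values are placeholders.
def _example_create_default_auth() -> BotFrameworkAuthentication:
    # Public-cloud authentication with app id/password credentials.
    return BotFrameworkAuthenticationFactory.create(
        credential_factory=PasswordServiceClientCredentialFactory(
            app_id="<app-id>", password="<app-password>"
        )
    )


def _example_create_government_auth() -> BotFrameworkAuthentication:
    # US Government cloud authentication, selected via the channel service constant.
    return BotFrameworkAuthenticationFactory.create(
        channel_service=GovernmentConstants.CHANNEL_SERVICE,
        credential_factory=PasswordServiceClientCredentialFactory(
            app_id="<app-id>", password="<app-password>"
        ),
    )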
|
botbuilder-python/libraries/botframework-connector/botframework/connector/auth/bot_framework_authentication_factory.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/auth/bot_framework_authentication_factory.py",
"repo_id": "botbuilder-python",
"token_count": 2054
}
| 413 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from logging import Logger
from msrest.authentication import Authentication
from .authentication_constants import AuthenticationConstants
from .government_constants import GovernmentConstants
from .microsoft_app_credentials import MicrosoftAppCredentials
from .microsoft_government_app_credentials import MicrosoftGovernmentAppCredentials
from .service_client_credentials_factory import ServiceClientCredentialsFactory
class PasswordServiceClientCredentialFactory(ServiceClientCredentialsFactory):
def __init__(
self,
app_id: str = None,
password: str = None,
tenant_id: str = None,
*,
logger: Logger = None
) -> None:
self.app_id = app_id
self.password = password
self.tenant_id = tenant_id
self._logger = logger
async def is_valid_app_id(self, app_id: str) -> bool:
return app_id == self.app_id
async def is_authentication_disabled(self) -> bool:
return not self.app_id
async def create_credentials(
self,
app_id: str,
oauth_scope: str,
login_endpoint: str,
validate_authority: bool,
) -> Authentication:
if await self.is_authentication_disabled():
return MicrosoftAppCredentials.empty()
if not await self.is_valid_app_id(app_id):
raise Exception("Invalid app_id")
credentials: MicrosoftAppCredentials
normalized_endpoint = login_endpoint.lower() if login_endpoint else ""
if normalized_endpoint.startswith(
AuthenticationConstants.TO_CHANNEL_FROM_BOT_LOGIN_URL_PREFIX
):
credentials = MicrosoftAppCredentials(
app_id, self.password, self.tenant_id, oauth_scope
)
elif normalized_endpoint.startswith(
GovernmentConstants.TO_CHANNEL_FROM_BOT_LOGIN_URL_PREFIX
):
credentials = MicrosoftGovernmentAppCredentials(
app_id,
self.password,
self.tenant_id,
oauth_scope,
)
else:
credentials = _PrivateCloudAppCredentials(
app_id,
self.password,
self.tenant_id,
oauth_scope,
login_endpoint,
validate_authority,
)
return credentials
class _PrivateCloudAppCredentials(MicrosoftAppCredentials):
def __init__(
self,
app_id: str,
password: str,
tenant_id: str,
oauth_scope: str,
oauth_endpoint: str,
validate_authority: bool,
):
super().__init__(
app_id, password, channel_auth_tenant=tenant_id, oauth_scope=oauth_scope
)
self.oauth_endpoint = oauth_endpoint
self._validate_authority = validate_authority
@property
def validate_authority(self):
return self._validate_authority
|
botbuilder-python/libraries/botframework-connector/botframework/connector/auth/password_service_client_credential_factory.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/auth/password_service_client_credential_factory.py",
"repo_id": "botbuilder-python",
"token_count": 1344
}
| 414 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from ... import models
class UserTokenOperations:
"""UserTokenOperations async operations.
You should not instantiate this class directly; instead, create a Client instance, which will create it
for you and attach it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the request. Constant value: "token".
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
self.api_version = "token"
async def get_token(
self,
user_id,
connection_name,
channel_id=None,
code=None,
*,
custom_headers=None,
raw=False,
**operation_config
):
"""
:param user_id:
:type user_id: str
:param connection_name:
:type connection_name: str
:param channel_id:
:type channel_id: str
:param code:
:type code: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: TokenResponse or ClientRawResponse if raw=true
:rtype: ~botframework.tokenapi.models.TokenResponse or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<botframework.tokenapi.models.ErrorResponseException>`
"""
# Construct URL
url = self.get_token.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["userId"] = self._serialize.query("user_id", user_id, "str")
query_parameters["connectionName"] = self._serialize.query(
"connection_name", connection_name, "str"
)
if channel_id is not None:
query_parameters["channelId"] = self._serialize.query(
"channel_id", channel_id, "str"
)
if code is not None:
query_parameters["code"] = self._serialize.query("code", code, "str")
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = await self._client.async_send(
request, stream=False, **operation_config
)
if response.status_code not in [200, 404]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("TokenResponse", response)
if response.status_code == 404:
deserialized = self._deserialize("TokenResponse", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_token.metadata = {"url": "/api/usertoken/GetToken"}
async def get_aad_tokens(
self,
user_id,
connection_name,
channel_id=None,
resource_urls=None,
*,
custom_headers=None,
raw=False,
**operation_config
):
"""
:param user_id:
:type user_id: str
:param connection_name:
:type connection_name: str
:param channel_id:
:type channel_id: str
:param resource_urls:
:type resource_urls: list[str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: dict or ClientRawResponse if raw=true
:rtype: dict[str, ~botframework.tokenapi.models.TokenResponse] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<botframework.tokenapi.models.ErrorResponseException>`
"""
aad_resource_urls = models.AadResourceUrls(resource_urls=resource_urls)
# Construct URL
url = self.get_aad_tokens.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["userId"] = self._serialize.query("user_id", user_id, "str")
query_parameters["connectionName"] = self._serialize.query(
"connection_name", connection_name, "str"
)
if channel_id is not None:
query_parameters["channelId"] = self._serialize.query(
"channel_id", channel_id, "str"
)
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(aad_resource_urls, "AadResourceUrls")
# Construct and send request
request = self._client.post(
url, query_parameters, header_parameters, body_content
)
response = await self._client.async_send(
request, stream=False, **operation_config
)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("{TokenResponse}", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_aad_tokens.metadata = {"url": "/api/usertoken/GetAadTokens"}
async def sign_out(
self,
user_id,
connection_name=None,
channel_id=None,
*,
custom_headers=None,
raw=False,
**operation_config
):
"""
:param user_id:
:type user_id: str
:param connection_name:
:type connection_name: str
:param channel_id:
:type channel_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: object or ClientRawResponse if raw=true
:rtype: object or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<botframework.tokenapi.models.ErrorResponseException>`
"""
# Construct URL
url = self.sign_out.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["userId"] = self._serialize.query("user_id", user_id, "str")
if connection_name is not None:
query_parameters["connectionName"] = self._serialize.query(
"connection_name", connection_name, "str"
)
if channel_id is not None:
query_parameters["channelId"] = self._serialize.query(
"channel_id", channel_id, "str"
)
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = await self._client.async_send(
request, stream=False, **operation_config
)
if response.status_code not in [200, 204]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("object", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
sign_out.metadata = {"url": "/api/usertoken/SignOut"}
async def get_token_status(
self,
user_id,
channel_id=None,
include=None,
*,
custom_headers=None,
raw=False,
**operation_config
):
"""
:param user_id:
:type user_id: str
:param channel_id:
:type channel_id: str
:param include:
:type include: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: list or ClientRawResponse if raw=true
:rtype: list[~botframework.tokenapi.models.TokenStatus] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<botframework.tokenapi.models.ErrorResponseException>`
"""
# Construct URL
url = self.get_token_status.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["userId"] = self._serialize.query("user_id", user_id, "str")
if channel_id is not None:
query_parameters["channelId"] = self._serialize.query(
"channel_id", channel_id, "str"
)
if include is not None:
query_parameters["include"] = self._serialize.query(
"include", include, "str"
)
query_parameters["api-version"] = self._serialize.query(
"self.api_version", self.api_version, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = await self._client.async_send(
request, stream=False, **operation_config
)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("[TokenStatus]", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get_token_status.metadata = {"url": "/api/usertoken/GetTokenStatus"}
async def exchange_async(
self,
user_id,
connection_name,
channel_id,
uri=None,
token=None,
*,
custom_headers=None,
raw=False,
**operation_config
):
"""
:param user_id:
:type user_id: str
:param connection_name:
:type connection_name: str
:param channel_id:
:type channel_id: str
:param uri:
:type uri: str
:param token:
:type token: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: object or ClientRawResponse if raw=true
:rtype: object or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<botframework.tokenapi.models.ErrorResponseException>`
"""
exchange_request = models.TokenExchangeRequest(uri=uri, token=token)
# Construct URL
url = self.exchange_async.metadata["url"]
# Construct parameters
query_parameters = {}
query_parameters["userId"] = self._serialize.query("user_id", user_id, "str")
query_parameters["connectionName"] = self._serialize.query(
"connection_name", connection_name, "str"
)
query_parameters["channelId"] = self._serialize.query(
"channel_id", channel_id, "str"
)
# Construct headers
header_parameters = {}
header_parameters["Accept"] = "application/json"
header_parameters["Content-Type"] = "application/json; charset=utf-8"
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(exchange_request, "TokenExchangeRequest")
# Construct and send request
request = self._client.post(
url, query_parameters, header_parameters, body_content
)
response = await self._client.async_send(
request, stream=False, **operation_config
)
if response.status_code not in [200, 400, 404]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("TokenResponse", response)
if response.status_code == 400:
deserialized = self._deserialize("ErrorResponse", response)
if response.status_code == 404:
deserialized = self._deserialize("TokenResponse", response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
exchange_async.metadata = {"url": "/api/usertoken/exchange"}
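# Illustrative usage sketch (not part of the generated code): these operations are meant to be
# reached through a generated client instance rather than constructed directly, as the class
# docstring notes. The client class, attribute name, and base URL below (TokenApiClient,
# user_token, https://api.botframework.com) are assumptions about the surrounding package,
# and the credential/user values are placeholders.
async def _example_get_user_token():
    from botframework.connector.auth import MicrosoftAppCredentials
    from botframework.connector.token_api.aio import TokenApiClient

    credentials = MicrosoftAppCredentials("<app-id>", "<app-password>")
    client = TokenApiClient(credentials, "https://api.botframework.com")
    return await client.user_token.get_token(
        user_id="<user-id>", connection_name="<connection-name>", channel_id="webchat"
    )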
|
botbuilder-python/libraries/botframework-connector/botframework/connector/token_api/aio/operations_async/_user_token_operations_async.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/botframework/connector/token_api/aio/operations_async/_user_token_operations_async.py",
"repo_id": "botbuilder-python",
"token_count": 6292
}
| 415 |
interactions:
- request:
body: null
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: GET
uri: https://slack.botframework.com/v3/attachments/bt13796-GJS4yaxDLI
response:
body: {string: '"NotFound"'}
headers:
cache-control: [no-cache]
content-length: ['10']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 18:24:36 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
x-powered-by: [ASP.NET]
status: {code: 404, message: Not Found}
version: 1
|
botbuilder-python/libraries/botframework-connector/tests/recordings/test_attachments_get_info_invalid_attachment_id_fails.yaml/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/tests/recordings/test_attachments_get_info_invalid_attachment_id_fails.yaml",
"repo_id": "botbuilder-python",
"token_count": 445
}
| 416 |
interactions:
- request:
body: '{"type": "message", "channelId": "slack", "from": {"id": "B21UTEF8S:T03CWQ0QB"},
"recipient": {"id": "U19KH8EHJ:T03CWQ0QB"}, "textFormat": "markdown", "attachmentLayout":
"list", "text": "Thread activity"}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['205']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: POST
uri: https://slack.botframework.com/v3/conversations/B21UTEF8S%3AT03CWQ0QB%3AD2369CT7C/activities
response:
body: {string: "{\r\n \"id\": \"1514570384.000325\"\r\n}"}
headers:
cache-control: [no-cache]
content-length: ['33']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 17:59:44 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
vary: [Accept-Encoding]
x-powered-by: [ASP.NET]
status: {code: 200, message: OK}
- request:
body: '{"type": "message", "channelId": "slack", "from": {"id": "B21UTEF8S:T03CWQ0QB"},
"recipient": {"id": "U19KH8EHJ:T03CWQ0QB"}, "textFormat": "markdown", "attachmentLayout":
"list", "text": "Child activity."}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['205']
Content-Type: [application/json; charset=utf-8]
User-Agent: [python/3.6.2 (Windows-10-10.0.16299-SP0) requests/2.18.1 msrest/0.4.23
azure-botframework-connector/3.0]
method: POST
uri: https://slack.botframework.com/v3/conversations/B21UTEF8S%3AT03CWQ0QB%3AD2369CT7C/activities/INVALID_ID
response:
body: {string: "{\r\n \"id\": \"1514570386.000010\"\r\n}"}
headers:
cache-control: [no-cache]
content-length: ['33']
content-type: [application/json; charset=utf-8]
date: ['Fri, 29 Dec 2017 17:59:46 GMT']
expires: ['-1']
pragma: [no-cache]
request-context: ['appId=cid-v1:6814484e-c0d5-40ea-9dba-74ff29ca4f62']
server: [Microsoft-IIS/10.0]
strict-transport-security: [max-age=31536000]
vary: [Accept-Encoding]
x-powered-by: [ASP.NET]
status: {code: 200, message: OK}
version: 1
|
botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_reply_to_activity_with_invalid_activity_id_fails.yaml/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/tests/recordings/test_conversations_reply_to_activity_with_invalid_activity_id_fails.yaml",
"repo_id": "botbuilder-python",
"token_count": 1195
}
| 417 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import pytest
from botframework.connector.auth import EndorsementsValidator
class TestEndorsementsValidator:
def test_none_channel_id_parameter_should_pass(self):
is_endorsed = EndorsementsValidator.validate(None, [])
assert is_endorsed
def test_none_endorsements_parameter_should_throw(self):
with pytest.raises(ValueError) as excinfo:
EndorsementsValidator.validate("foo", None)
assert "endorsements" in excinfo
def test_unendorsed_channel_id_should_fail(self):
is_endorsed = EndorsementsValidator.validate("channelOne", [])
assert not is_endorsed
def test_mismatched_endorsements_channel_id_should_fail(self):
is_endorsed = EndorsementsValidator.validate("right", ["wrong"])
assert not is_endorsed
def test_endorsed_channel_id_should_pass(self):
is_endorsed = EndorsementsValidator.validate("right", ["right"])
assert is_endorsed
def test_endorsed_channel_id_should_pass_with_two_endorsements(self):
is_endorsed = EndorsementsValidator.validate("right", ["right", "wrong"])
assert is_endorsed
def test_unaffinitized_activity_should_pass(self):
is_endorsed = EndorsementsValidator.validate("", ["right", "wrong"])
assert is_endorsed
|
botbuilder-python/libraries/botframework-connector/tests/test_endorsements_validator.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-connector/tests/test_endorsements_validator.py",
"repo_id": "botbuilder-python",
"token_count": 543
}
| 418 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import asyncio
from uuid import UUID
from typing import Awaitable, Callable, List
import botframework.streaming as streaming
import botframework.streaming.payloads as payloads
from botframework.streaming.payloads.models import Header, RequestPayload
from .assembler import Assembler
class ReceiveRequestAssembler(Assembler):
# pylint: disable=super-init-not-called
def __init__(
self,
header: Header,
stream_manager: "payloads.StreamManager",
on_completed: Callable[[UUID, "streaming.ReceiveRequest"], Awaitable],
):
if not header:
raise TypeError(
f"'header: {header.__class__.__name__}' argument can't be None"
)
if not on_completed:
raise TypeError("'on_completed' argument can't be None")
self._stream_manager = stream_manager
self._on_completed = on_completed
self.identifier = header.id
self._length = header.payload_length if header.end else None
self._stream: List[int] = None
def create_stream_from_payload(self) -> List[int]:
return [None] * (self._length or 0)
def get_payload_as_stream(self) -> List[int]:
if self._stream is None:
self._stream = self.create_stream_from_payload()
return self._stream
def on_receive(self, header: Header, stream: List[int], content_length: int):
if header.end:
self.end = True
# Execute the request in the background
asyncio.ensure_future(self.process_request(stream))
def close(self):
self._stream_manager.close_stream(self.identifier)
async def process_request(self, stream: List[int]):
request_payload = RequestPayload().from_json(bytes(stream).decode("utf-8-sig"))
request = streaming.ReceiveRequest(
verb=request_payload.verb, path=request_payload.path, streams=[]
)
if request_payload.streams:
for stream_description in request_payload.streams:
try:
identifier = UUID(stream_description.id)
except Exception:
raise ValueError(
f"Stream description id '{stream_description.id}' is not a Guid"
)
stream_assembler = self._stream_manager.get_payload_assembler(
identifier
)
stream_assembler.content_type = stream_description.content_type
stream_assembler.content_length = stream_description.length
content_stream = payloads.ContentStream(
identifier=identifier, assembler=stream_assembler
)
content_stream.length = stream_description.length
content_stream.content_type = stream_description.content_type
request.streams.append(content_stream)
await self._on_completed(self.identifier, request)
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/assemblers/receive_request_assembler.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/assemblers/receive_request_assembler.py",
"repo_id": "botbuilder-python",
"token_count": 1313
}
| 419 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json
from .serializable import Serializable
class StreamDescription(Serializable):
# pylint: disable=invalid-name
def __init__(self, *, id: str = None, content_type: str = None, length: int = None):
self.id = id
self.content_type = content_type
self.length = length
def to_dict(self) -> dict:
obj = {"id": self.id, "type": self.content_type}
if self.length is not None:
obj["length"] = self.length
return obj
def from_dict(self, json_dict: dict) -> "StreamDescription":
self.id = json_dict.get("id")
self.content_type = json_dict.get("type")
self.length = json_dict.get("length")
return self
def to_json(self) -> str:
return json.dumps(self.to_dict())
def from_json(self, json_str: str) -> "StreamDescription":
obj = json.loads(json_str)
return self.from_dict(obj)
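# Illustrative sketch (not part of the original module): a JSON round trip using the
# serializers above. The id and length values are arbitrary placeholders.
def _example_round_trip() -> "StreamDescription":
    original = StreamDescription(
        id="68e999ca-a651-40f4-ad8f-3aaf781862b4",
        content_type="application/json",
        length=42,
    )
    # from_json() parses the JSON produced by to_json() back into a new instance.
    return StreamDescription().from_json(original.to_json())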
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/models/stream_description.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/payloads/models/stream_description.py",
"repo_id": "botbuilder-python",
"token_count": 405
}
| 420 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from abc import ABC
class TransportConstants(ABC):
MAX_PAYLOAD_LENGTH = 4096
MAX_HEADER_LENGTH = 48
MAX_LENGTH = 999999
MIN_LENGTH = 0
|
botbuilder-python/libraries/botframework-streaming/botframework/streaming/transport/transport_constants.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/botframework/streaming/transport/transport_constants.py",
"repo_id": "botbuilder-python",
"token_count": 85
}
| 421 |
from typing import List
import aiounittest
from botframework.streaming import PayloadStream
from botframework.streaming.payload_transport import PayloadReceiver
from botframework.streaming.transport import TransportReceiverBase
class MockTransportReceiver(TransportReceiverBase):
# pylint: disable=unused-argument
def __init__(self, mock_header: bytes, mock_payload: bytes):
self._is_connected = True
self._mock_gen = self._mock_receive(mock_header, mock_payload)
def _mock_receive(self, mock_header: bytes, mock_payload: bytes):
yield mock_header
yield mock_payload
@property
def is_connected(self):
if self._is_connected:
self._is_connected = False
return True
return False
async def close(self):
return
async def receive(self, buffer: object, offset: int, count: int) -> int:
resp_buffer = list(next(self._mock_gen))
for index, val in enumerate(resp_buffer):
buffer[index] = val
return len(resp_buffer)
class MockStream(PayloadStream):
# pylint: disable=super-init-not-called
def __init__(self):
self.buffer = None
self._producer_length = 0 # total length
def give_buffer(self, buffer: List[int]):
self.buffer = buffer
class TestBotFrameworkHttpClient(aiounittest.AsyncTestCase):
async def test_connect(self):
mock_header = b"S.000004.e35ed534-0808-4acf-af1e-24aa81d2b31d.1\n"
mock_payload = b"test"
mock_receiver = MockTransportReceiver(mock_header, mock_payload)
mock_stream = MockStream()
receive_action_called = False
def mock_get_stream(header): # pylint: disable=unused-argument
return mock_stream
def mock_receive_action(header, stream, offset):
nonlocal receive_action_called
assert header.type == "S"
assert len(stream.buffer) == offset
receive_action_called = True
sut = PayloadReceiver()
sut.subscribe(mock_get_stream, mock_receive_action)
await sut.connect(mock_receiver)
assert bytes(mock_stream.buffer) == mock_payload
assert receive_action_called
|
botbuilder-python/libraries/botframework-streaming/tests/test_payload_receiver.py/0
|
{
"file_path": "botbuilder-python/libraries/botframework-streaming/tests/test_payload_receiver.py",
"repo_id": "botbuilder-python",
"token_count": 913
}
| 422 |
@echo off
rd /s /q generated
echo [91mWARNING:[0m There is manual code for lines 127-130 in attachments_operations_async.py and lines 12-26 in the connector_client.py.
echo [91mCalling this command script has removed those sections of code.[0m
@echo on
call npx autorest README.md --python --use=".\node_modules\@microsoft.azure\autorest.python"
@echo off
pushd generated
call npx replace "query_parameters\['api-version'\][^\n]+\n" "" . --recursive --include="*.py"
popd
rd /s /q ..\botbuilder-schema\botbuilder\schema
rd /s /q ..\botframework-connector\botframework\connector\operations
rd /s /q ..\botframework-connector\botframework\connector\aio
del ..\botframework-connector\botframework\connector\connector_client.py
move generated\botframework\connector\models ..\botbuilder-schema\botbuilder\schema
move generated\botframework\connector\operations ..\botframework-connector\botframework\connector\operations
move generated\botframework\connector\aio ..\botframework-connector\botframework\connector\aio
move generated\botframework\connector\_connector_client.py ..\botframework-connector\botframework\connector\connector_client.py
move generated\botframework\connector\version.py ..\botframework-connector\botframework\connector\version.py
move generated\botframework\connector\_configuration.py ..\botframework-connector\botframework\connector\_configuration.py
@echo on
call npx autorest tokenAPI.md --python --use=".\node_modules\@microsoft.azure\autorest.python"
@echo off
echo [92mMove tokenAPI to botframework-connector[0m
rd /s /q ..\botframework-connector\botframework\connector\token_api
move tokenApi\botframework\tokenApi ..\botframework-connector\botframework\connector\token_api
echo [92mRemoving generated folders ("generated/", "tokenApi/")[0m
rd /s /q tokenApi
|
botbuilder-python/swagger/generateClient.cmd/0
|
{
"file_path": "botbuilder-python/swagger/generateClient.cmd",
"repo_id": "botbuilder-python",
"token_count": 581
}
| 423 |
from uuid import uuid4
from datetime import datetime
from http import HTTPStatus
from typing import List
from botbuilder.core import (
ActivityHandler,
BotFrameworkAdapter,
BotFrameworkHttpClient,
CardFactory,
ConversationState,
UserState,
MessageFactory,
TurnContext,
)
from botbuilder.schema import (
Activity,
ActivityTypes,
ConversationAccount,
DeliveryModes,
ChannelAccount,
OAuthCard,
TokenExchangeInvokeRequest,
)
from botframework.connector.token_api.models import (
TokenExchangeResource,
TokenExchangeRequest,
)
from config import DefaultConfig
from helpers.dialog_helper import DialogHelper
from dialogs import MainDialog
class ParentBot(ActivityHandler):
def __init__(
self,
skill_client: BotFrameworkHttpClient,
config: DefaultConfig,
dialog: MainDialog,
conversation_state: ConversationState,
user_state: UserState,
):
self._client = skill_client
self._conversation_state = conversation_state
self._user_state = user_state
self._dialog = dialog
self._from_bot_id = config.APP_ID
self._to_bot_id = config.SKILL_MICROSOFT_APP_ID
self._connection_name = config.CONNECTION_NAME
async def on_turn(self, turn_context: TurnContext):
await super().on_turn(turn_context)
await self._conversation_state.save_changes(turn_context)
await self._user_state.save_changes(turn_context)
async def on_message_activity(self, turn_context: TurnContext):
# for signin, just use an oauth prompt to get the exchangeable token
# also ensure that the channelId is not emulator
if turn_context.activity.channel_id != "emulator":
if (
turn_context.activity.text == "login"
or turn_context.activity.text.isdigit()
):
await self._conversation_state.load(turn_context, True)
await self._user_state.load(turn_context, True)
await DialogHelper.run_dialog(
self._dialog,
turn_context,
self._conversation_state.create_property("DialogState"),
)
elif turn_context.activity.text == "logout":
bot_adapter = turn_context.adapter
await bot_adapter.sign_out_user(turn_context, self._connection_name)
await turn_context.send_activity(
MessageFactory.text("You have been signed out.")
)
elif turn_context.activity.text in ("skill login", "skill logout"):
# incoming activity needs to be cloned for buffered replies
clone_activity = MessageFactory.text(turn_context.activity.text)
TurnContext.apply_conversation_reference(
clone_activity,
TurnContext.get_conversation_reference(turn_context.activity),
True,
)
clone_activity.delivery_mode = DeliveryModes.expect_replies
activities = await self._client.post_buffered_activity(
self._from_bot_id,
self._to_bot_id,
"http://localhost:3979/api/messages",
"http://tempuri.org/whatever",
turn_context.activity.conversation.id,
clone_activity,
)
if activities:
if not await self._intercept_oauth_cards(
activities, turn_context
):
await turn_context.send_activities(activities)
return
await turn_context.send_activity(MessageFactory.text("parent: before child"))
activity = MessageFactory.text("parent: before child")
TurnContext.apply_conversation_reference(
activity,
TurnContext.get_conversation_reference(turn_context.activity),
True,
)
activity.delivery_mode = DeliveryModes.expect_replies
activities = await self._client.post_buffered_activity(
self._from_bot_id,
self._to_bot_id,
"http://localhost:3979/api/messages",
"http://tempuri.org/whatever",
str(uuid4()),
activity,
)
await turn_context.send_activities(activities)
await turn_context.send_activity(MessageFactory.text("parent: after child"))
async def on_members_added_activity(
self, members_added: List[ChannelAccount], turn_context: TurnContext
):
for member in members_added:
if member.id != turn_context.activity.recipient.id:
await turn_context.send_activity(
MessageFactory.text("Hello and welcome!")
)
async def _intercept_oauth_cards(
self, activities: List[Activity], turn_context: TurnContext,
) -> bool:
if not activities:
return False
activity = activities[0]
if activity.attachments:
for attachment in filter(
lambda att: att.content_type == CardFactory.content_types.oauth_card,
activity.attachments,
):
oauth_card: OAuthCard = OAuthCard().from_dict(attachment.content)
oauth_card.token_exchange_resource: TokenExchangeResource = TokenExchangeResource().from_dict(
oauth_card.token_exchange_resource
)
if oauth_card.token_exchange_resource:
token_exchange_provider: BotFrameworkAdapter = turn_context.adapter
result = await token_exchange_provider.exchange_token(
turn_context,
self._connection_name,
turn_context.activity.from_property.id,
TokenExchangeRequest(
uri=oauth_card.token_exchange_resource.uri
),
)
if result.token:
return await self._send_token_exchange_invoke_to_skill(
turn_context,
activity,
oauth_card.token_exchange_resource.id,
result.token,
)
return False
async def _send_token_exchange_invoke_to_skill(
self,
turn_context: TurnContext,
incoming_activity: Activity,
identifier: str,
token: str,
) -> bool:
activity = self._create_reply(incoming_activity)
activity.type = ActivityTypes.invoke
activity.name = "signin/tokenExchange"
activity.value = TokenExchangeInvokeRequest(id=identifier, token=token,)
# route the activity to the skill
response = await self._client.post_activity(
self._from_bot_id,
self._to_bot_id,
"http://localhost:3979/api/messages",
"http://tempuri.org/whatever",
incoming_activity.conversation.id,
activity,
)
# Check response status: true if success, false if failure
is_success = int(HTTPStatus.OK) <= response.status <= 299
message = (
"Skill token exchange successful"
if is_success
else "Skill token exchange failed"
)
await turn_context.send_activity(MessageFactory.text(message))
return is_success
def _create_reply(self, activity) -> Activity:
return Activity(
type=ActivityTypes.message,
timestamp=datetime.utcnow(),
from_property=ChannelAccount(
id=activity.recipient.id, name=activity.recipient.name
),
recipient=ChannelAccount(
id=activity.from_property.id, name=activity.from_property.name
),
reply_to_id=activity.id,
service_url=activity.service_url,
channel_id=activity.channel_id,
conversation=ConversationAccount(
is_group=activity.conversation.is_group,
id=activity.conversation.id,
name=activity.conversation.name,
),
text="",
locale=activity.locale,
)
|
botbuilder-python/tests/experimental/sso/parent/bots/parent_bot.py/0
|
{
"file_path": "botbuilder-python/tests/experimental/sso/parent/bots/parent_bot.py",
"repo_id": "botbuilder-python",
"token_count": 4008
}
| 424 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import asyncio
import sys
from types import MethodType
from flask import Flask, Response, request
from botbuilder.core import (
BotFrameworkAdapter,
BotFrameworkAdapterSettings,
MessageFactory,
TurnContext,
)
from botbuilder.schema import Activity, InputHints
from .default_config import DefaultConfig
from .my_bot import MyBot
class BotApp:
"""A Flask echo bot."""
def __init__(self):
# Create the loop and Flask app
self.loop = asyncio.get_event_loop()
self.flask = Flask(__name__, instance_relative_config=True)
self.flask.config.from_object(DefaultConfig)
# Create adapter.
# See https://aka.ms/about-bot-adapter to learn more about how bots work.
self.settings = BotFrameworkAdapterSettings(
self.flask.config["APP_ID"], self.flask.config["APP_PASSWORD"]
)
self.adapter = BotFrameworkAdapter(self.settings)
# Catch-all for errors.
async def on_error(adapter, context: TurnContext, error: Exception):
# This check writes out errors to console log .vs. app insights.
# NOTE: In production environment, you should consider logging this to Azure
# application insights.
print(f"\n [on_turn_error]: {error}", file=sys.stderr)
# Send a message to the user
error_message_text = "Sorry, it looks like something went wrong."
error_message = MessageFactory.text(
error_message_text, error_message_text, InputHints.expecting_input
)
await context.send_activity(error_message)
# pylint: disable=protected-access
if adapter._conversation_state:
# If state was defined, clear it.
await adapter._conversation_state.delete(context)
self.adapter.on_turn_error = MethodType(on_error, self.adapter)
# Create the main dialog
self.bot = MyBot()
def messages(self) -> Response:
"""Main bot message handler that listens for incoming requests."""
if "application/json" in request.headers["Content-Type"]:
body = request.json
else:
return Response(status=415)
activity = Activity().deserialize(body)
auth_header = (
request.headers["Authorization"]
if "Authorization" in request.headers
else ""
)
async def aux_func(turn_context):
await self.bot.on_turn(turn_context)
try:
task = self.loop.create_task(
self.adapter.process_activity(activity, auth_header, aux_func)
)
self.loop.run_until_complete(task)
return Response(status=201)
except Exception as exception:
raise exception
@staticmethod
def test() -> Response:
"""
For test only - verify if the flask app works locally - e.g. with:
```bash
curl http://127.0.0.1:3978/api/test
```
You shall get:
```
test
```
"""
return Response(status=200, response="test\n")
def run(self, host=None) -> None:
try:
self.flask.run(
host=host, debug=False, port=self.flask.config["PORT"]
) # nosec debug
except Exception as exception:
raise exception
|
botbuilder-python/tests/functional-tests/functionaltestbot/flask_bot_app/bot_app.py/0
|
{
"file_path": "botbuilder-python/tests/functional-tests/functionaltestbot/flask_bot_app/bot_app.py",
"repo_id": "botbuilder-python",
"token_count": 1492
}
| 425 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .dialog_bot import DialogBot
from .auth_bot import AuthBot
__all__ = ["DialogBot", "AuthBot"]
|
botbuilder-python/tests/skills/skills-prototypes/dialog-to-dialog/authentication-bot/bots/__init__.py/0
|
{
"file_path": "botbuilder-python/tests/skills/skills-prototypes/dialog-to-dialog/authentication-bot/bots/__init__.py",
"repo_id": "botbuilder-python",
"token_count": 55
}
| 426 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import sys
import traceback
from datetime import datetime
from aiohttp import web
from aiohttp.web import Request, Response
from botbuilder.core import (
BotFrameworkAdapterSettings,
ConversationState,
MemoryStorage,
TurnContext,
BotFrameworkAdapter,
)
from botbuilder.core.integration import (
aiohttp_channel_service_routes,
aiohttp_error_middleware,
BotFrameworkHttpClient,
)
from botbuilder.core.skills import SkillConversationIdFactory, SkillHandler
from botbuilder.schema import Activity, ActivityTypes
from botframework.connector.auth import (
AuthenticationConfiguration,
SimpleCredentialProvider,
)
from bots import RootBot
from config import DefaultConfig, SkillConfiguration
CONFIG = DefaultConfig()
SKILL_CONFIG = SkillConfiguration()
CREDENTIAL_PROVIDER = SimpleCredentialProvider(CONFIG.APP_ID, CONFIG.APP_PASSWORD)
CLIENT = BotFrameworkHttpClient(CREDENTIAL_PROVIDER)
# Create adapter.
# See https://aka.ms/about-bot-adapter to learn more about how bots work.
SETTINGS = BotFrameworkAdapterSettings(CONFIG.APP_ID, CONFIG.APP_PASSWORD)
ADAPTER = BotFrameworkAdapter(SETTINGS)
STORAGE = MemoryStorage()
CONVERSATION_STATE = ConversationState(STORAGE)
ID_FACTORY = SkillConversationIdFactory(STORAGE)
# Catch-all for errors.
async def on_error(context: TurnContext, error: Exception):
    # This writes errors to the console log rather than to Application Insights.
    # NOTE: In a production environment, you should consider logging this to
    # Azure Application Insights.
print(f"\n [on_turn_error] unhandled error: {error}", file=sys.stderr)
traceback.print_exc()
# Send a message to the user
await context.send_activity("The bot encountered an error or bug.")
await context.send_activity(
"To continue to run this bot, please fix the bot source code."
)
# Send a trace activity if we're talking to the Bot Framework Emulator
if context.activity.channel_id == "emulator":
# Create a trace activity that contains the error object
trace_activity = Activity(
label="TurnError",
name="on_turn_error Trace",
timestamp=datetime.utcnow(),
type=ActivityTypes.trace,
value=f"{error}",
value_type="https://www.botframework.com/schemas/error",
)
# Send a trace activity, which will be displayed in Bot Framework Emulator
await context.send_activity(trace_activity)
ADAPTER.on_turn_error = on_error
# Create the Bot
BOT = RootBot(CONVERSATION_STATE, SKILL_CONFIG, ID_FACTORY, CLIENT, CONFIG)
SKILL_HANDLER = SkillHandler(
ADAPTER, BOT, ID_FACTORY, CREDENTIAL_PROVIDER, AuthenticationConfiguration()
)
# Listen for incoming requests on /api/messages
async def messages(req: Request) -> Response:
# Main bot message handler.
if "application/json" in req.headers["Content-Type"]:
body = await req.json()
else:
return Response(status=415)
activity = Activity().deserialize(body)
auth_header = req.headers["Authorization"] if "Authorization" in req.headers else ""
try:
await ADAPTER.process_activity(activity, auth_header, BOT.on_turn)
return Response(status=201)
except Exception as exception:
raise exception
APP = web.Application(middlewares=[aiohttp_error_middleware])
APP.router.add_post("/api/messages", messages)
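# Expose the Bot Framework channel service endpoints under /api/skills so skills can send activities back to this root bot.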
APP.router.add_routes(aiohttp_channel_service_routes(SKILL_HANDLER, "/api/skills"))
if __name__ == "__main__":
try:
web.run_app(APP, host="localhost", port=CONFIG.PORT)
except Exception as error:
raise error
|
botbuilder-python/tests/skills/skills-prototypes/simple-bot-to-bot/simple-root-bot/app.py/0
|
{
"file_path": "botbuilder-python/tests/skills/skills-prototypes/simple-bot-to-bot/simple-root-bot/app.py",
"repo_id": "botbuilder-python",
"token_count": 1280
}
| 427 |
import argparse
import multiprocessing
import multiprocessing.pool
import os
import subprocess
from pathlib import Path
from typing import cast
import xml.etree.cElementTree as ET
import tempfile
import glob
import cffsubr.__main__
import fontmake.instantiator
import fontTools.designspaceLib
import fontTools.ttLib
import fontTools.ttLib.tables._g_l_y_f as _g_l_y_f
import psautohint.__main__
from gftools.stat import gen_stat_tables_from_config
import yaml
import ufo2ft
import ufoLib2
import vttLib
import vttLib.transfer
from vttmisc import tsi1, tsic
VERSION_YEAR_MONTH = 2404
VERSION_DAY = 23
OUTPUT_DIR = Path("build")
OUTPUT_OTF_DIR = OUTPUT_DIR / "otf"
OUTPUT_TTF_DIR = OUTPUT_DIR / "ttf"
OUTPUT_WOFF2_DIR = OUTPUT_DIR / "woff2"
OUTPUT_STATIC_OTF_DIR = OUTPUT_OTF_DIR / "static"
OUTPUT_STATIC_TTF_DIR = OUTPUT_TTF_DIR / "static"
OUTPUT_STATIC_WOFF2_DIR = OUTPUT_WOFF2_DIR / "static"
INPUT_DIR = Path("sources")
VTT_DATA_FILE = INPUT_DIR / "vtt_data" / "CascadiaCode_VTT.ttf"
ITALIC_VTT_DATA_FILE = INPUT_DIR / "vtt_data" / "CascadiaCodeItalic_VTT.ttf"
FEATURES_DIR = INPUT_DIR / "features"
NERDFONTS_DIR = INPUT_DIR / "nerdfonts"
# Font modifications
# ****************************************************************
def step_set_font_name(name: str, source: ufoLib2.Font) -> None:
source.info.familyName = source.info.familyName.replace("Cascadia Code", name)
# We have to change the style map family name because that's what
# Windows uses to map Bold/Regular/Medium/etc. fonts
if source.info.styleMapFamilyName:
source.info.styleMapFamilyName = source.info.styleMapFamilyName.replace("Cascadia Code", name)
def step_merge_glyphs_from_ufo(path: Path, instance: ufoLib2.Font) -> None:
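    # Record which codepoints the instance already has: encoded donor glyphs are only
    # merged when their codepoint is new, while unencoded glyphs are always copied over.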
unicodes = []
for glyph in instance:
unicodes.append(glyph.unicode)
ufo = ufoLib2.Font.open(path)
for glyph in ufo:
if glyph.unicode:
if glyph.unicode not in unicodes:
newName = str(hex(glyph.unicode)).upper().replace("0X","uni")
instance.layers.defaultLayer.insertGlyph(ufo[glyph.name],newName, overwrite=False, copy=False)
else:
instance.addGlyph(ufo[glyph.name])
def step_set_feature_file(path: Path, name: str, instance: ufoLib2.Font) -> None:
featureSet = ""
if "Italic" in name: #until I can come up with a more elegent solution, this'll do.
featureList = [
"header_italic", # adds definitions, language systems
"aalt_italic",
"ccmp",
"locl_italic",
"calt_italic",
"figures_italic", # contains subs/sinf/sups/numr/dnom
"frac",
"ordn",
"case",
"salt",
"ss01",
"ss02",
"ss03",
"ss19",
"ss20",
"rclt",
"zero"
]
else:
featureList = [
"header", # adds definitions, language systems
"aalt",
"ccmp",
"locl",
"calt",
"figures", # contains subs/sinf/sups/numr/dnom
"frac",
"ordn",
"case",
"ss02",
"ss19",
"ss20",
"rclt",
"zero",
"init",
"medi",
"fina",
"rlig",
]
for item in featureList:
if "PL" in name and item == "rclt":
featureSet += Path(path / str("rclt_PL.fea")).read_text()
elif "NF" in name and item == "rclt":
featureSet += Path(path / str("rclt_PL.fea")).read_text()
elif "Mono" in name and "calt" in item:
featureSet += Path(path / str(item+"_mono.fea")).read_text() #both Italic and Regular can use same mono
else:
featureSet += Path(path / str(item+".fea")).read_text()
instance.features.text = featureSet
def set_font_metaData(font: ufoLib2.Font) -> None:
font.info.versionMajor = VERSION_YEAR_MONTH
font.info.versionMinor = VERSION_DAY
font.info.openTypeOS2TypoAscender = 1900
font.info.openTypeOS2TypoDescender = -480
font.info.openTypeOS2TypoLineGap = 0
font.info.openTypeHheaAscender = font.info.openTypeOS2TypoAscender
font.info.openTypeHheaDescender = font.info.openTypeOS2TypoDescender
font.info.openTypeHheaLineGap = font.info.openTypeOS2TypoLineGap
font.info.openTypeOS2WinAscent = 2226
font.info.openTypeOS2WinDescent = abs(font.info.openTypeOS2TypoDescender)
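    # Configure the gasp table: rendering (gridfit/smoothing) behavior per PPEM range.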
font.info.openTypeGaspRangeRecords = [
{"rangeMaxPPEM": 9, "rangeGaspBehavior": [1, 3]},
{"rangeMaxPPEM": 50, "rangeGaspBehavior": [0, 1, 2, 3]},
{"rangeMaxPPEM": 65535, "rangeGaspBehavior": [1, 3]},
]
def set_overlap_flag(varfont: fontTools.ttLib.TTFont) -> None:
glyf = cast(_g_l_y_f.table__g_l_y_f, varfont["glyf"])
for glyph_name in glyf.keys():
glyph = glyf[glyph_name]
if glyph.isComposite():
# Set OVERLAP_COMPOUND bit for compound glyphs
glyph.components[0].flags |= 0x400
elif glyph.numberOfContours > 0:
# Set OVERLAP_SIMPLE bit for simple glyphs
glyph.flags[0] |= 0x40
def prepare_fonts(
designspace: fontTools.designspaceLib.DesignSpaceDocument, name: str
) -> None:
designspace.loadSourceFonts(ufoLib2.Font.open)
for source in designspace.sources:
step_set_feature_file(FEATURES_DIR, name, source.font)
if "PL" in name or "NF" in name or "Mono" in name:
step_set_font_name(name, source.font)
if "PL" in name or "NF" in name:
print(f"[{name} {source.styleName}] Merging PL glyphs")
step_merge_glyphs_from_ufo(
NERDFONTS_DIR / "NerdfontsPL-Regular.ufo", source.font
)
if "NF" in name:
print(f"[{name} {source.styleName}] Merging NF glyphs")
for ufo in Path(NERDFONTS_DIR/"full"/"processed").glob("*.ufo"):
step_merge_glyphs_from_ufo(
ufo, source.font
)
set_font_metaData(source.font)
for instance in designspace.instances:
instance.name = instance.name.replace("Cascadia Code", name)
instance.familyName = instance.familyName.replace("Cascadia Code", name)
if instance.styleMapFamilyName:
instance.styleMapFamilyName = instance.styleMapFamilyName.replace("Cascadia Code", name)
def to_woff2(source_path: Path, target_path: Path) -> None:
print(f"[WOFF2] Compressing {source_path} to {target_path}")
font = fontTools.ttLib.TTFont(source_path)
font.flavor = "woff2"
target_path.parent.mkdir(exist_ok=True, parents=True)
font.save(target_path)
# Build fonts
# ****************************************************************
def build_font_variable(
designspace: fontTools.designspaceLib.DesignSpaceDocument,
name: str,
vtt_compile: bool = True,
) -> None:
prepare_fonts(designspace, name)
compile_variable_and_save(designspace, vtt_compile)
def build_font_static(
designspace: fontTools.designspaceLib.DesignSpaceDocument,
instance_descriptor: fontTools.designspaceLib.InstanceDescriptor,
name: str,
) -> None:
prepare_fonts(designspace, name)
generator = fontmake.instantiator.Instantiator.from_designspace(designspace)
instance = generator.generate_instance(instance_descriptor)
instance.info.familyName = instance.info.familyName.replace(" Italic","")
if instance.info.styleMapFamilyName:
instance.info.styleMapFamilyName = instance.info.styleMapFamilyName.replace(" Italic","")
compile_static_and_save(instance, name.replace(" Italic",""))
# Export fonts
# ****************************************************************
def compile_variable_and_save(
designspace: fontTools.designspaceLib.DesignSpaceDocument,
vtt_compile: bool = True,
) -> None:
if "Italic" in designspace.default.font.info.familyName: #Some weird stuff happens with Italics
designspace.default.font.info.familyName = designspace.default.font.info.familyName.replace(" Italic", "")
familyName = designspace.default.font.info.familyName
styleName = designspace.default.font.info.styleName
file_stem = familyName.replace(" ", "")
if "Italic" in styleName and "Italic" not in file_stem:
file_stem = file_stem+"Italic"
file_path: Path = (OUTPUT_TTF_DIR / file_stem).with_suffix(".ttf")
print(f"[{familyName} {styleName}] Compiling")
varFont = ufo2ft.compileVariableTTF(designspace, inplace=True)
print(f"[{familyName} {styleName}] Merging VTT")
if "Italic" in styleName:
font_vtt = fontTools.ttLib.TTFont(ITALIC_VTT_DATA_FILE)
else:
font_vtt = fontTools.ttLib.TTFont(VTT_DATA_FILE)
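    # Copy the VTT source tables (plus maxp) from the hinting-data font into the freshly
    # compiled variable font so the hints can be compiled, or shipped as source below.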
for table in ["TSI0", "TSI1", "TSI2", "TSI3", "TSI5", "TSIC", "maxp"]:
varFont[table] = fontTools.ttLib.newTable(table)
varFont[table] = font_vtt[table]
# this will correct the OFFSET[R] commands in TSI1
if font_vtt.getGlyphOrder() != varFont.getGlyphOrder():
tsi1.fixOFFSET(varFont, font_vtt)
if vtt_compile:
print(f"[{familyName} {styleName}] Compiling VTT")
vttLib.compile_instructions(varFont, ship=True)
else:
file_path = (OUTPUT_TTF_DIR / str(file_stem+"_VTT")).with_suffix(".ttf")
    # Last-minute manual corrections to set things correctly:
    # set two flags to enable proper rendering (one for overlaps on Mac, the other for Windows hinting),
    # and help Mac Office generate the PostScript name correctly for variable fonts when an italic is present.
set_overlap_flag(varFont)
varFont["head"].flags = 0x000b
if "Regular" in styleName:
varFont["name"].setName(familyName.replace(" ","")+"Roman", 25, 3, 1, 1033)
print(f"[{familyName} {styleName}] Saving")
file_path.parent.mkdir(exist_ok=True, parents=True)
varFont.save(file_path)
print(f"[{familyName}] Done: {file_path}")
def compile_static_and_save(instance: ufoLib2.Font, name:str) -> None:
family_name = name
style_name = instance.info.styleName
print(f"[{family_name}] Building static instance: {style_name}")
# Use pathops backend for overlap removal because it is, at the time of this
# writing, massively faster than booleanOperations and thanks to autohinting,
    # there is no need to keep outlines compatible with previous releases.
static_ttf = ufo2ft.compileTTF(
instance, removeOverlaps=True, overlapsBackend="pathops"
)
static_otf = ufo2ft.compileOTF(
instance,
removeOverlaps=True,
overlapsBackend="pathops",
# Can do inplace now because TTF is already done.
inplace=True,
# Don't optimize here, will be optimized after autohinting.
optimizeCFF=ufo2ft.CFFOptimization.NONE,
)
file_name = f"{family_name}-{style_name}".replace(" ", "")
file_path_static = (OUTPUT_STATIC_TTF_DIR / file_name).with_suffix(".ttf")
file_path_static_otf = (OUTPUT_STATIC_OTF_DIR / file_name).with_suffix(".otf")
file_path_static.parent.mkdir(exist_ok=True, parents=True)
static_ttf.save(file_path_static)
file_path_static_otf.parent.mkdir(exist_ok=True, parents=True)
static_otf.save(file_path_static_otf)
print(f"[{family_name}] Done: {file_path_static}, {file_path_static_otf}")
# Font hinting
# ****************************************************************
def autohint(otf_path: Path) -> None:
path = os.fspath(otf_path)
print(f"Autohinting {path}")
psautohint.__main__.main([path])
print(f"Compressing {path}")
cffsubr.__main__.main(["-i", path])
def ttfautohint(path: str) -> None:
print(f"Autohinting {path}")
subprocess.check_call(
[
"ttfautohint",
"--stem-width",
"nsn",
"--increase-x-height",
"0",
"--reference",
os.fspath(OUTPUT_STATIC_TTF_DIR / "CascadiaCode-Regular.ttf"),
path,
path[:-4] + "-hinted.ttf",
]
)
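    # ttfautohint wrote a separate "-hinted" file; replace the original TTF with it.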
os.remove(path)
os.rename(path[:-4] + "-hinted.ttf", path)
# Main build script
# ****************************************************************
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="build some fonts")
parser.add_argument("-P", "--no-powerline", action="store_false", dest="powerline")
parser.add_argument("-NF", "--no-nerdfonts", action="store_false", dest="nerdfonts")
parser.add_argument("-M", "--no-mono", action="store_false", dest="mono")
parser.add_argument("-S", "--static-fonts", action="store_true")
parser.add_argument("-I", "--no-italic", action="store_false", dest="italic")
parser.add_argument(
"-V",
"--no-vtt-compile",
action="store_false",
dest="vtt_compile",
help="Do not compile VTT code but leave in the VTT sources.",
)
parser.add_argument("-W", "--web-fonts", action="store_true")
args = parser.parse_args()
# Load Designspace and filter out instances that are marked as non-exportable.
designspace = fontTools.designspaceLib.DesignSpaceDocument.fromfile(
INPUT_DIR / "CascadiaCode_variable.designspace"
)
designspace.instances = [
s
for s in designspace.instances
if s.lib.get("com.schriftgestaltung.export", True)
]
designspaceItalic = fontTools.designspaceLib.DesignSpaceDocument.fromfile(
INPUT_DIR / "CascadiaCode_variable_italic.designspace"
)
designspaceItalic.instances = [
s
for s in designspaceItalic.instances
if s.lib.get("com.schriftgestaltung.export", True)
]
#Stage 1: Make all the things.
pool = multiprocessing.pool.Pool(processes=multiprocessing.cpu_count())
processes = []
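    # Queue one build job per font flavor (Code/Mono, PL/NF, Italic); each runs in its own worker process.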
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Code",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Code Italic",
args.vtt_compile,
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Mono",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Mono Italic",
args.vtt_compile,
),
)
)
if args.powerline:
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Code PL",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Code PL Italic",
args.vtt_compile,
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Mono PL",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Mono PL Italic",
args.vtt_compile,
),
)
)
if args.nerdfonts:
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Code NF",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Code NF Italic",
args.vtt_compile,
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_variable,
(
designspace,
"Cascadia Mono NF",
args.vtt_compile,
),
)
)
if args.italic:
processes.append(
pool.apply_async(
build_font_variable,
(
designspaceItalic,
"Cascadia Mono NF Italic",
args.vtt_compile,
),
)
)
if args.static_fonts:
# Build the Regulars
for instance_descriptor in designspace.instances:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Code",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Mono",
),
)
)
if args.powerline:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Code PL",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Mono PL",
),
)
)
if args.nerdfonts:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Code NF",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspace,
instance_descriptor,
"Cascadia Mono NF",
),
)
)
if args.italic:
# Build the Regulars
for instance_descriptor in designspaceItalic.instances:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Code Italic",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Mono Italic",
),
)
)
if args.powerline:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Code PL Italic",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Mono PL Italic",
),
)
)
if args.nerdfonts:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Code NF Italic",
),
)
)
if args.mono:
processes.append(
pool.apply_async(
build_font_static,
(
designspaceItalic,
instance_descriptor,
"Cascadia Mono NF Italic",
),
)
)
pool.close()
pool.join()
for process in processes:
process.get()
del processes, pool
# Step 1.5: Adding STAT tables in one go
print ("[Cascadia Variable fonts] Fixing STAT tables")
fontSTAT = [fontTools.ttLib.TTFont(f) for f in list(OUTPUT_TTF_DIR.glob("*.ttf"))]
with open(INPUT_DIR/"stat.yaml") as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
gen_stat_tables_from_config(config, fontSTAT)
for font in fontSTAT:
font.save(font.reader.file.name)
# Stage 2: Autohint and maybe compress all the static things.
if args.static_fonts is True:
otfs = list(OUTPUT_STATIC_OTF_DIR.glob("*.otf"))
if otfs:
pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
processes = [pool.apply_async(autohint, (otf,)) for otf in otfs]
pool.close()
pool.join()
for process in processes:
process.get()
del processes, pool
try:
for ttf_path in OUTPUT_STATIC_TTF_DIR.glob("*.ttf"):
if not ttf_path.stem.endswith("-hinted"):
ttfautohint(os.fspath(ttf_path))
except Exception as e:
print(f"ttfautohint failed. Please reinstall and try again. {str(e)}")
# Stage 3: Have some web fonts.
if args.web_fonts:
pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())
processes = [
pool.apply_async(
to_woff2,
(
path,
# This removes build/ttf from the found files and prepends
# build/woff2 instead, keeping the sub-structure.
OUTPUT_WOFF2_DIR
/ path.relative_to(OUTPUT_TTF_DIR).with_suffix(".woff2"),
),
)
for path in OUTPUT_TTF_DIR.glob("**/*.ttf")
]
pool.close()
pool.join()
for process in processes:
process.get()
print("All done.")
|
cascadia-code/build.py/0
|
{
"file_path": "cascadia-code/build.py",
"repo_id": "cascadia-code",
"token_count": 13624
}
| 428 |
# Add or remove dependencies here and then use pip-tools'
# pip-compile -U requirements.in
# to update requirements.txt (the lock file, so to speak).
fontmake
psautohint
gftools
vttLib
skia-pathops
fontTools[ufo,lxml,woff]
vttmisc
|
cascadia-code/requirements.in/0
|
{
"file_path": "cascadia-code/requirements.in",
"repo_id": "cascadia-code",
"token_count": 84
}
| 429 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Ahookabove" format="2">
<advance width="1200"/>
<unicode hex="1EA2"/>
<outline>
<component base="A"/>
<component base="hookabovecomb.case"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/A_hookabove.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/A_hookabove.glif",
"repo_id": "cascadia-code",
"token_count": 91
}
| 430 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Dcroat" format="2">
<advance width="1200"/>
<unicode hex="0110"/>
<outline>
<component base="Eth"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/D_croat.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/D_croat.glif",
"repo_id": "cascadia-code",
"token_count": 76
}
| 431 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Eng" format="2">
<advance width="1200"/>
<unicode hex="014A"/>
<anchor x="600" y="-388" name="bottom"/>
<outline>
<contour>
<point x="493" y="-480" type="line"/>
<point x="699" y="-477" type="line" smooth="yes"/>
<point x="955" y="-473"/>
<point x="1069" y="-355"/>
<point x="1069" y="-96" type="curve" smooth="yes"/>
<point x="1069" y="184" type="line"/>
<point x="815" y="184" type="line"/>
<point x="815" y="-88" type="line" smooth="yes"/>
<point x="815" y="-187"/>
<point x="771" y="-231"/>
<point x="671" y="-237" type="curve" smooth="yes"/>
<point x="491" y="-240" type="line"/>
</contour>
<component base="N"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>N</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/E_ng.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/E_ng.glif",
"repo_id": "cascadia-code",
"token_count": 559
}
| 432 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="F.half" format="2">
<advance width="839"/>
<outline>
<contour>
<point x="109" y="0" type="line"/>
<point x="260" y="0" type="line"/>
<point x="260" y="639" type="line"/>
<point x="109" y="639" type="line"/>
</contour>
<contour>
<point x="109" y="223" type="line"/>
<point x="643" y="223" type="line"/>
<point x="643" y="359" type="line"/>
<point x="109" y="359" type="line"/>
</contour>
<contour>
<point x="109" y="504" type="line"/>
<point x="772" y="504" type="line"/>
<point x="772" y="639" type="line"/>
<point x="109" y="639" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/F_.half.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/F_.half.glif",
"repo_id": "cascadia-code",
"token_count": 349
}
| 433 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Io-cy" format="2">
<advance width="1200"/>
<unicode hex="0401"/>
<outline>
<component base="Ie-cy"/>
<component base="dieresiscomb.case" xOffset="30"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/I_o-cy.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/I_o-cy.glif",
"repo_id": "cascadia-code",
"token_count": 100
}
| 434 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Kacute" format="2">
<advance width="1200"/>
<unicode hex="1E30"/>
<outline>
<component base="K"/>
<component base="acutecomb.case" xOffset="99"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/K_acute.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/K_acute.glif",
"repo_id": "cascadia-code",
"token_count": 97
}
| 435 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Ldot" format="2">
<advance width="1200"/>
<unicode hex="013F"/>
<outline>
<component base="L"/>
<component base="periodcentered" xOffset="233" yOffset="220"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>L</string>
</dict>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>periodcentered</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/L_dot.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/L_dot.glif",
"repo_id": "cascadia-code",
"token_count": 418
}
| 436 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Nhookleft" format="2">
<advance width="1200"/>
<unicode hex="019D"/>
<anchor x="600" y="-388" name="bottom"/>
<outline>
<contour>
<point x="-74" y="-480" type="line"/>
<point x="15" y="-477" type="line" smooth="yes"/>
<point x="271" y="-473"/>
<point x="385" y="-355"/>
<point x="385" y="-96" type="curve" smooth="yes"/>
<point x="385" y="184" type="line"/>
<point x="131" y="184" type="line"/>
<point x="131" y="-88" type="line" smooth="yes"/>
<point x="131" y="-187"/>
<point x="88" y="-235"/>
<point x="-13" y="-237" type="curve" smooth="yes"/>
<point x="-76" y="-240" type="line"/>
</contour>
<component base="N"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>N</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/N_hookleft.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/N_hookleft.glif",
"repo_id": "cascadia-code",
"token_count": 560
}
| 437 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Ocircumflexgrave" format="2">
<advance width="1200"/>
<unicode hex="1ED2"/>
<outline>
<component base="O"/>
<component base="circumflexcomb.case"/>
<component base="gravecomb.case" xOffset="304" yOffset="248"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>anchor</key>
<string>top_viet</string>
<key>index</key>
<integer>2</integer>
<key>name</key>
<string>gravecomb.case</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/O_circumflexgrave.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/O_circumflexgrave.glif",
"repo_id": "cascadia-code",
"token_count": 317
}
| 438 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Omacronacute" format="2">
<advance width="1200"/>
<unicode hex="1E52"/>
<outline>
<component base="O"/>
<component base="macroncomb.case"/>
<component base="acutecomb.case" xOffset="79" yOffset="341"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/O_macronacute.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/O_macronacute.glif",
"repo_id": "cascadia-code",
"token_count": 118
}
| 439 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Pi" format="2">
<advance width="1200"/>
<unicode hex="03A0"/>
<outline>
<component base="Pe-cy"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/P_i.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/P_i.glif",
"repo_id": "cascadia-code",
"token_count": 77
}
| 440 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Scedilla" format="2">
<advance width="1200"/>
<unicode hex="015E"/>
<outline>
<component base="S"/>
<component base="cedillacomb" xOffset="35"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/S_cedilla.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/S_cedilla.glif",
"repo_id": "cascadia-code",
"token_count": 93
}
| 441 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Tcedilla" format="2">
<advance width="1200"/>
<unicode hex="0162"/>
<outline>
<component base="T"/>
<component base="cedillacomb" xOffset="20"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/T_cedilla.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/T_cedilla.glif",
"repo_id": "cascadia-code",
"token_count": 93
}
| 442 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Udotbelow" format="2">
<advance width="1200"/>
<unicode hex="1EE4"/>
<outline>
<component base="U"/>
<component base="dotbelowcomb"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/U_dotbelow.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/U_dotbelow.glif",
"repo_id": "cascadia-code",
"token_count": 89
}
| 443 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Zacute" format="2">
<advance width="1200"/>
<unicode hex="0179"/>
<outline>
<component base="Z"/>
<component base="acutecomb.case" xOffset="79"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/Z_acute.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/Z_acute.glif",
"repo_id": "cascadia-code",
"token_count": 96
}
| 444 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="_alefWasla-ar.fina.rlig" format="2">
<anchor x="0" y="0" name="_overlap"/>
<anchor x="343" y="-141" name="bottom"/>
<anchor x="249" y="1624" name="top"/>
<outline>
<component base="_alef-ar.fina.short.rlig"/>
<component base="wasla-ar" xOffset="-357" yOffset="65"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>_alef-ar.fina.short.rlig</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/_alefW_asla-ar.fina.rlig.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/_alefW_asla-ar.fina.rlig.glif",
"repo_id": "cascadia-code",
"token_count": 406
}
| 445 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="_vbelow-ar" format="2">
<advance width="1200"/>
<anchor x="602" y="-117" name="_bottom"/>
<anchor x="602" y="-2" name="_bottom.dot"/>
<anchor x="603" y="-476" name="bottom"/>
<outline>
<component base="_vabove" yOffset="-995"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>_vabove</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/_vbelow-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/_vbelow-ar.glif",
"repo_id": "cascadia-code",
"token_count": 372
}
| 446 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="acircumflexdotbelow" format="2">
<advance width="1200"/>
<unicode hex="1EAD"/>
<outline>
<component base="a"/>
<component base="dotbelowcomb"/>
<component base="circumflexcomb" xOffset="-20"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/acircumflexdotbelow.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/acircumflexdotbelow.glif",
"repo_id": "cascadia-code",
"token_count": 111
}
| 447 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="alef-ar.short" format="2">
<advance width="1200"/>
<anchor x="610" y="-141" name="bottom"/>
<anchor x="610" y="-26" name="bottom.dot"/>
<anchor x="588" y="1204" name="top.dot"/>
<outline>
<contour>
<point x="471" y="0" type="line"/>
<point x="745" y="0" type="line"/>
<point x="724" y="1197" type="line"/>
<point x="450" y="1197" type="line"/>
</contour>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alef-ar.short.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alef-ar.short.glif",
"repo_id": "cascadia-code",
"token_count": 281
}
| 448 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="alefMadda-ar" format="2">
<advance width="1200"/>
<unicode hex="0622"/>
<anchor x="588" y="1546" name="top"/>
<outline>
<component base="alef-ar.short"/>
<component base="madda-ar" xOffset="-15" yOffset="81"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alefM_adda-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alefM_adda-ar.glif",
"repo_id": "cascadia-code",
"token_count": 193
}
| 449 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="alefdagesh-hb.BRACKET.500" format="2">
<advance width="1200"/>
<anchor x="448.786" y="373.713" name="_center"/>
<outline>
<contour>
<point x="448.786" y="306.095" type="curve" smooth="yes"/>
<point x="486.352" y="306.095"/>
<point x="517.155" y="336.147"/>
<point x="517.155" y="373.713" type="curve" smooth="yes"/>
<point x="517.155" y="411.278"/>
<point x="486.352" y="442.082"/>
<point x="448.786" y="442.082" type="curve" smooth="yes"/>
<point x="411.221" y="442.082"/>
<point x="380.417" y="411.278"/>
<point x="380.417" y="373.713" type="curve" smooth="yes"/>
<point x="380.417" y="336.147"/>
<point x="411.221" y="306.095"/>
</contour>
<component base="alef-hb"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs._originalLayerName</key>
<string>[500]</string>
<key>public.markColor</key>
<string>0.97,1,0,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alefdagesh-hb.B_R_A_C_K_E_T_.500.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/alefdagesh-hb.B_R_A_C_K_E_T_.500.glif",
"repo_id": "cascadia-code",
"token_count": 504
}
| 450 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="beh-ar.init" format="2">
<advance width="1200"/>
<outline>
<component base="behDotless-ar.init"/>
<component base="dotbelow-ar" xOffset="170" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/beh-ar.init.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/beh-ar.init.glif",
"repo_id": "cascadia-code",
"token_count": 164
}
| 451 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="behThreedotshorizontalbelow-ar.alt" format="2">
<advance width="1200"/>
<outline>
<component base="behDotless-ar.alt"/>
<component base="_dots.horz.below" xOffset="-618" yOffset="-18"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>anchor</key>
<string>bottom.dot</string>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>_dots.horz.below</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behT_hreedotshorizontalbelow-ar.alt.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behT_hreedotshorizontalbelow-ar.alt.glif",
"repo_id": "cascadia-code",
"token_count": 355
}
| 452 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="behThreedotsupbelow-ar.fina" format="2">
<advance width="1200"/>
<outline>
<component base="behDotless-ar.fina"/>
<component base="threedotsupbelow-ar" xOffset="-20" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behT_hreedotsupbelow-ar.fina.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behT_hreedotsupbelow-ar.fina.glif",
"repo_id": "cascadia-code",
"token_count": 175
}
| 453 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="behVabove-ar.init.alt" format="2">
<advance width="1200"/>
<anchor x="0" y="0" name="overlap"/>
<outline>
<component base="behDotless-ar.init.alt"/>
<component base="vabove-ar" xOffset="275" yOffset="345"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>anchor</key>
<string>top.dot</string>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>vabove-ar</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behV_above-ar.init.alt.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behV_above-ar.init.alt.glif",
"repo_id": "cascadia-code",
"token_count": 362
}
| 454 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="behVinvertedbelow-ar.medi" format="2">
<advance width="1200"/>
<outline>
<component base="behDotless-ar.medi"/>
<component base="_vinvertedbelow-ar" xOffset="-10" yOffset="-24"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behV_invertedbelow-ar.medi.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/behV_invertedbelow-ar.medi.glif",
"repo_id": "cascadia-code",
"token_count": 171
}
| 455 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="breve" format="2">
<advance width="1200"/>
<unicode hex="02D8"/>
<outline>
<component base="brevecomb"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>brevecomb</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/breve.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/breve.glif",
"repo_id": "cascadia-code",
"token_count": 275
}
| 456 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="colon_colon_equal.liga" format="2">
<advance width="1200"/>
<outline>
<contour>
<point x="1900" y="835" type="line"/>
<point x="3320" y="835" type="line"/>
<point x="3320" y="1085" type="line"/>
<point x="1900" y="1085" type="line"/>
</contour>
<contour>
<point x="1900" y="333" type="line"/>
<point x="3320" y="333" type="line"/>
<point x="3320" y="583" type="line"/>
<point x="1900" y="583" type="line"/>
</contour>
<component base="colon" xOffset="650" yOffset="180"/>
<component base="colon" xOffset="-120" yOffset="180"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>colon</string>
</dict>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>colon</string>
</dict>
</array>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/colon_colon_equal.liga.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/colon_colon_equal.liga.glif",
"repo_id": "cascadia-code",
"token_count": 628
}
| 457 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="cuberoot-ar" format="2">
<advance width="1200"/>
<unicode hex="0606"/>
<outline>
<component base="_cuberoot_fourthroot-ar"/>
<component base="three-persian.small01" xOffset="110" yOffset="-115"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>_cuberoot_fourthroot-ar</string>
</dict>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>three-persian.small01</string>
</dict>
</array>
<key>com.schriftgestaltung.Glyphs.glyph.leftMetricsKey</key>
<string>_cuberoot_fourthroot-ar</string>
<key>com.schriftgestaltung.Glyphs.glyph.rightMetricsKey</key>
<string>_cuberoot_fourthroot-ar</string>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/cuberoot-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/cuberoot-ar.glif",
"repo_id": "cascadia-code",
"token_count": 595
}
| 458 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="dalThreedotsdown-ar.fina" format="2">
<advance width="1200"/>
<guideline x="127" y="632" angle="0"/>
<outline>
<component base="dal-ar.fina"/>
<component base="threedotsdownabove-ar" xOffset="36" yOffset="502"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalT_hreedotsdown-ar.fina.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dalT_hreedotsdown-ar.fina.glif",
"repo_id": "cascadia-code",
"token_count": 188
}
| 459 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="deviceControlThreeControl" format="2">
<advance width="1200"/>
<unicode hex="2413"/>
<outline>
<component base="D.half" xOffset="-10" yOffset="781"/>
<component base="three.half" xOffset="370"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>0</integer>
<key>name</key>
<string>D.half</string>
</dict>
<dict>
<key>alignment</key>
<integer>-1</integer>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>three.half</string>
</dict>
</array>
<key>com.schriftgestaltung.Glyphs.glyph.widthMetricsKey</key>
<string>space</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/deviceC_ontrolT_hreeC_ontrol.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/deviceC_ontrolT_hreeC_ontrol.glif",
"repo_id": "cascadia-code",
"token_count": 477
}
| 460 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="dotabove-ar" format="2">
<advance width="1200"/>
<anchor x="600" y="782" name="_top"/>
<anchor x="600" y="542" name="_top.dot"/>
<anchor x="601" y="1002" name="top"/>
<outline>
<component base="dotbelow-ar" yOffset="1016"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dotabove-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/dotabove-ar.glif",
"repo_id": "cascadia-code",
"token_count": 197
}
| 461 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="ehookabove" format="2">
<advance width="1200"/>
<unicode hex="1EBB"/>
<outline>
<component base="e"/>
<component base="hookabovecomb" xOffset="20"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/ehookabove.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/ehookabove.glif",
"repo_id": "cascadia-code",
"token_count": 94
}
| 462 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="emacron" format="2">
<advance width="1200"/>
<unicode hex="0113"/>
<outline>
<component base="e"/>
<component base="macroncomb"/>
</outline>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/emacron.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/emacron.glif",
"repo_id": "cascadia-code",
"token_count": 88
}
| 463 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="feh-ar.medi" format="2">
<advance width="1200"/>
<outline>
<component base="fehDotless-ar.medi"/>
<component base="dotabove-ar" xOffset="2" yOffset="347"/>
</outline>
<lib>
<dict>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/feh-ar.medi.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/feh-ar.medi.glif",
"repo_id": "cascadia-code",
"token_count": 165
}
| 464 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="fehDotbelowThreedotsabove-ar" format="2">
<advance width="1200"/>
<unicode hex="08A4"/>
<outline>
<component base="fehDotless-ar"/>
<component base="dotbelow-ar" xOffset="290" yOffset="-24"/>
<component base="threedotsupabove-ar" xOffset="249" yOffset="460"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>anchor</key>
<string>bottom.dot</string>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>dotbelow-ar</string>
</dict>
<dict>
<key>anchor</key>
<string>top.dot</string>
<key>index</key>
<integer>2</integer>
<key>name</key>
<string>threedotsupabove-ar</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/fehD_otbelowT_hreedotsabove-ar.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/fehD_otbelowT_hreedotsabove-ar.glif",
"repo_id": "cascadia-code",
"token_count": 514
}
| 465 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="fehThreedotsupbelow-ar.alt" format="2">
<advance width="1200"/>
<outline>
<component base="fehDotless-ar.alt"/>
<component base="threedotsupbelow-ar" xOffset="-38" yOffset="-18"/>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.ComponentInfo</key>
<array>
<dict>
<key>anchor</key>
<string>bottom.dot</string>
<key>index</key>
<integer>1</integer>
<key>name</key>
<string>threedotsupbelow-ar</string>
</dict>
</array>
<key>public.markColor</key>
<string>0.98,0.36,0.67,1</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/fehT_hreedotsupbelow-ar.alt.glif/0
|
{
"file_path": "cascadia-code/sources/CascadiaCode-Bold.ufo/glyphs/fehT_hreedotsupbelow-ar.alt.glif",
"repo_id": "cascadia-code",
"token_count": 353
}
| 466 |